com.pulumi.gcp.dataproc.kotlin.MetastoreServiceArgs.kt

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.MetastoreServiceArgs.builder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceEncryptionConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceEncryptionConfigArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceHiveMetastoreConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceHiveMetastoreConfigArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceMaintenanceWindowArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceMaintenanceWindowArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceMetadataIntegrationArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceMetadataIntegrationArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceNetworkConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceNetworkConfigArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceScalingConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceScalingConfigArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceScheduledBackupArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceScheduledBackupArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceTelemetryConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceTelemetryConfigArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * A managed metastore service that serves metadata queries.
 * To get more information about Service, see:
 * * [API documentation](https://cloud.google.com/dataproc-metastore/docs/reference/rest/v1/projects.locations.services)
 * * How-to Guides
 *     * [Official Documentation](https://cloud.google.com/dataproc-metastore/docs/overview)
 * ## Example Usage
 * ### Dataproc Metastore Service Basic
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const _default = new gcp.dataproc.MetastoreService("default", {
 *     serviceId: "metastore-srv",
 *     location: "us-central1",
 *     port: 9080,
 *     tier: "DEVELOPER",
 *     maintenanceWindow: {
 *         hourOfDay: 2,
 *         dayOfWeek: "SUNDAY",
 *     },
 *     hiveMetastoreConfig: {
 *         version: "2.3.6",
 *     },
 *     labels: {
 *         env: "test",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * default = gcp.dataproc.MetastoreService("default",
 *     service_id="metastore-srv",
 *     location="us-central1",
 *     port=9080,
 *     tier="DEVELOPER",
 *     maintenance_window=gcp.dataproc.MetastoreServiceMaintenanceWindowArgs(
 *         hour_of_day=2,
 *         day_of_week="SUNDAY",
 *     ),
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="2.3.6",
 *     ),
 *     labels={
 *         "env": "test",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var @default = new Gcp.Dataproc.MetastoreService("default", new()
 *     {
 *         ServiceId = "metastore-srv",
 *         Location = "us-central1",
 *         Port = 9080,
 *         Tier = "DEVELOPER",
 *         MaintenanceWindow = new Gcp.Dataproc.Inputs.MetastoreServiceMaintenanceWindowArgs
 *         {
 *             HourOfDay = 2,
 *             DayOfWeek = "SUNDAY",
 *         },
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "2.3.6",
 *         },
 *         Labels =
 *         {
 *             { "env", "test" },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewMetastoreService(ctx, "default", &dataproc.MetastoreServiceArgs{
 * 			ServiceId: pulumi.String("metastore-srv"),
 * 			Location:  pulumi.String("us-central1"),
 * 			Port:      pulumi.Int(9080),
 * 			Tier:      pulumi.String("DEVELOPER"),
 * 			MaintenanceWindow: &dataproc.MetastoreServiceMaintenanceWindowArgs{
 * 				HourOfDay: pulumi.Int(2),
 * 				DayOfWeek: pulumi.String("SUNDAY"),
 * 			},
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("2.3.6"),
 * 			},
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("test"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceMaintenanceWindowArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var default_ = new MetastoreService("default", MetastoreServiceArgs.builder()
 *             .serviceId("metastore-srv")
 *             .location("us-central1")
 *             .port(9080)
 *             .tier("DEVELOPER")
 *             .maintenanceWindow(MetastoreServiceMaintenanceWindowArgs.builder()
 *                 .hourOfDay(2)
 *                 .dayOfWeek("SUNDAY")
 *                 .build())
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("2.3.6")
 *                 .build())
 *             .labels(Map.of("env", "test"))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   default:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: metastore-srv
 *       location: us-central1
 *       port: 9080
 *       tier: DEVELOPER
 *       maintenanceWindow:
 *         hourOfDay: 2
 *         dayOfWeek: SUNDAY
 *       hiveMetastoreConfig:
 *         version: 2.3.6
 *       labels:
 *         env: test
 * ```
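 * The same configuration, sketched in Kotlin with the `MetastoreServiceArgs` data class
 * defined in this file. This is a minimal illustration only: it assumes the nested input
 * classes from `com.pulumi.gcp.dataproc.kotlin.inputs` follow the same `Output`-typed
 * data-class constructor pattern as `MetastoreServiceArgs`, and it stops at building the
 * argument bundle (wiring it into a resource is done by the Pulumi Kotlin provider).
 * ```kotlin
 * import com.pulumi.core.Output
 * import com.pulumi.gcp.dataproc.kotlin.MetastoreServiceArgs
 * import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceHiveMetastoreConfigArgs
 * import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceMaintenanceWindowArgs
 * // Argument bundle for a basic DEVELOPER-tier metastore service.
 * val basicArgs = MetastoreServiceArgs(
 *     serviceId = Output.of("metastore-srv"),
 *     location = Output.of("us-central1"),
 *     port = Output.of(9080),
 *     tier = Output.of("DEVELOPER"),
 *     maintenanceWindow = Output.of(
 *         // Assumed constructor shape, mirroring the fields used in the examples above.
 *         MetastoreServiceMaintenanceWindowArgs(
 *             hourOfDay = Output.of(2),
 *             dayOfWeek = Output.of("SUNDAY"),
 *         ),
 *     ),
 *     hiveMetastoreConfig = Output.of(
 *         MetastoreServiceHiveMetastoreConfigArgs(version = Output.of("2.3.6")),
 *     ),
 *     labels = Output.of(mapOf("env" to "test")),
 * )
 * ```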
 * 
 * ### Dataproc Metastore Service Cmek Example
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const keyRing = new gcp.kms.KeyRing("key_ring", {
 *     name: "example-keyring",
 *     location: "us-central1",
 * });
 * const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
 *     name: "example-key",
 *     keyRing: keyRing.id,
 *     purpose: "ENCRYPT_DECRYPT",
 * });
 * const _default = new gcp.dataproc.MetastoreService("default", {
 *     serviceId: "example-service",
 *     location: "us-central1",
 *     encryptionConfig: {
 *         kmsKey: cryptoKey.id,
 *     },
 *     hiveMetastoreConfig: {
 *         version: "3.1.2",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * key_ring = gcp.kms.KeyRing("key_ring",
 *     name="example-keyring",
 *     location="us-central1")
 * crypto_key = gcp.kms.CryptoKey("crypto_key",
 *     name="example-key",
 *     key_ring=key_ring.id,
 *     purpose="ENCRYPT_DECRYPT")
 * default = gcp.dataproc.MetastoreService("default",
 *     service_id="example-service",
 *     location="us-central1",
 *     encryption_config=gcp.dataproc.MetastoreServiceEncryptionConfigArgs(
 *         kms_key=crypto_key.id,
 *     ),
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="3.1.2",
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
 *     {
 *         Name = "example-keyring",
 *         Location = "us-central1",
 *     });
 *     var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
 *     {
 *         Name = "example-key",
 *         KeyRing = keyRing.Id,
 *         Purpose = "ENCRYPT_DECRYPT",
 *     });
 *     var @default = new Gcp.Dataproc.MetastoreService("default", new()
 *     {
 *         ServiceId = "example-service",
 *         Location = "us-central1",
 *         EncryptionConfig = new Gcp.Dataproc.Inputs.MetastoreServiceEncryptionConfigArgs
 *         {
 *             KmsKey = cryptoKey.Id,
 *         },
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "3.1.2",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/kms"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
 * 			Name:     pulumi.String("example-keyring"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
 * 			Name:    pulumi.String("example-key"),
 * 			KeyRing: keyRing.ID(),
 * 			Purpose: pulumi.String("ENCRYPT_DECRYPT"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = dataproc.NewMetastoreService(ctx, "default", &dataproc.MetastoreServiceArgs{
 * 			ServiceId: pulumi.String("example-service"),
 * 			Location:  pulumi.String("us-central1"),
 * 			EncryptionConfig: &dataproc.MetastoreServiceEncryptionConfigArgs{
 * 				KmsKey: cryptoKey.ID(),
 * 			},
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("3.1.2"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.kms.KeyRing;
 * import com.pulumi.gcp.kms.KeyRingArgs;
 * import com.pulumi.gcp.kms.CryptoKey;
 * import com.pulumi.gcp.kms.CryptoKeyArgs;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceEncryptionConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
 *             .name("example-keyring")
 *             .location("us-central1")
 *             .build());
 *         var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
 *             .name("example-key")
 *             .keyRing(keyRing.id())
 *             .purpose("ENCRYPT_DECRYPT")
 *             .build());
 *         var default_ = new MetastoreService("default", MetastoreServiceArgs.builder()
 *             .serviceId("example-service")
 *             .location("us-central1")
 *             .encryptionConfig(MetastoreServiceEncryptionConfigArgs.builder()
 *                 .kmsKey(cryptoKey.id())
 *                 .build())
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("3.1.2")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   default:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: example-service
 *       location: us-central1
 *       encryptionConfig:
 *         kmsKey: ${cryptoKey.id}
 *       hiveMetastoreConfig:
 *         version: 3.1.2
 *   cryptoKey:
 *     type: gcp:kms:CryptoKey
 *     name: crypto_key
 *     properties:
 *       name: example-key
 *       keyRing: ${keyRing.id}
 *       purpose: ENCRYPT_DECRYPT
 *   keyRing:
 *     type: gcp:kms:KeyRing
 *     name: key_ring
 *     properties:
 *       name: example-keyring
 *       location: us-central1
 * ```
 * 
 * ### Dataproc Metastore Service Private Service Connect
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const net = new gcp.compute.Network("net", {
 *     name: "my-network",
 *     autoCreateSubnetworks: false,
 * });
 * const subnet = new gcp.compute.Subnetwork("subnet", {
 *     name: "my-subnetwork",
 *     region: "us-central1",
 *     network: net.id,
 *     ipCidrRange: "10.0.0.0/22",
 *     privateIpGoogleAccess: true,
 * });
 * const _default = new gcp.dataproc.MetastoreService("default", {
 *     serviceId: "metastore-srv",
 *     location: "us-central1",
 *     hiveMetastoreConfig: {
 *         version: "3.1.2",
 *     },
 *     networkConfig: {
 *         consumers: [{
 *             subnetwork: subnet.id,
 *         }],
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * net = gcp.compute.Network("net",
 *     name="my-network",
 *     auto_create_subnetworks=False)
 * subnet = gcp.compute.Subnetwork("subnet",
 *     name="my-subnetwork",
 *     region="us-central1",
 *     network=net.id,
 *     ip_cidr_range="10.0.0.0/22",
 *     private_ip_google_access=True)
 * default = gcp.dataproc.MetastoreService("default",
 *     service_id="metastore-srv",
 *     location="us-central1",
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="3.1.2",
 *     ),
 *     network_config=gcp.dataproc.MetastoreServiceNetworkConfigArgs(
 *         consumers=[gcp.dataproc.MetastoreServiceNetworkConfigConsumerArgs(
 *             subnetwork=subnet.id,
 *         )],
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var net = new Gcp.Compute.Network("net", new()
 *     {
 *         Name = "my-network",
 *         AutoCreateSubnetworks = false,
 *     });
 *     var subnet = new Gcp.Compute.Subnetwork("subnet", new()
 *     {
 *         Name = "my-subnetwork",
 *         Region = "us-central1",
 *         Network = net.Id,
 *         IpCidrRange = "10.0.0.0/22",
 *         PrivateIpGoogleAccess = true,
 *     });
 *     var @default = new Gcp.Dataproc.MetastoreService("default", new()
 *     {
 *         ServiceId = "metastore-srv",
 *         Location = "us-central1",
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "3.1.2",
 *         },
 *         NetworkConfig = new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigArgs
 *         {
 *             Consumers = new[]
 *             {
 *                 new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigConsumerArgs
 *                 {
 *                     Subnetwork = subnet.Id,
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/compute"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		net, err := compute.NewNetwork(ctx, "net", &compute.NetworkArgs{
 * 			Name:                  pulumi.String("my-network"),
 * 			AutoCreateSubnetworks: pulumi.Bool(false),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		subnet, err := compute.NewSubnetwork(ctx, "subnet", &compute.SubnetworkArgs{
 * 			Name:                  pulumi.String("my-subnetwork"),
 * 			Region:                pulumi.String("us-central1"),
 * 			Network:               net.ID(),
 * 			IpCidrRange:           pulumi.String("10.0.0.0/22"),
 * 			PrivateIpGoogleAccess: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = dataproc.NewMetastoreService(ctx, "default", &dataproc.MetastoreServiceArgs{
 * 			ServiceId: pulumi.String("metastore-srv"),
 * 			Location:  pulumi.String("us-central1"),
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("3.1.2"),
 * 			},
 * 			NetworkConfig: &dataproc.MetastoreServiceNetworkConfigArgs{
 * 				Consumers: dataproc.MetastoreServiceNetworkConfigConsumerArray{
 * 					&dataproc.MetastoreServiceNetworkConfigConsumerArgs{
 * 						Subnetwork: subnet.ID(),
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.compute.Network;
 * import com.pulumi.gcp.compute.NetworkArgs;
 * import com.pulumi.gcp.compute.Subnetwork;
 * import com.pulumi.gcp.compute.SubnetworkArgs;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var net = new Network("net", NetworkArgs.builder()
 *             .name("my-network")
 *             .autoCreateSubnetworks(false)
 *             .build());
 *         var subnet = new Subnetwork("subnet", SubnetworkArgs.builder()
 *             .name("my-subnetwork")
 *             .region("us-central1")
 *             .network(net.id())
 *             .ipCidrRange("10.0.0.0/22")
 *             .privateIpGoogleAccess(true)
 *             .build());
 *         var default_ = new MetastoreService("default", MetastoreServiceArgs.builder()
 *             .serviceId("metastore-srv")
 *             .location("us-central1")
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("3.1.2")
 *                 .build())
 *             .networkConfig(MetastoreServiceNetworkConfigArgs.builder()
 *                 .consumers(MetastoreServiceNetworkConfigConsumerArgs.builder()
 *                     .subnetwork(subnet.id())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   net:
 *     type: gcp:compute:Network
 *     properties:
 *       name: my-network
 *       autoCreateSubnetworks: false
 *   subnet:
 *     type: gcp:compute:Subnetwork
 *     properties:
 *       name: my-subnetwork
 *       region: us-central1
 *       network: ${net.id}
 *       ipCidrRange: 10.0.0.0/22
 *       privateIpGoogleAccess: true
 *   default:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: metastore-srv
 *       location: us-central1
 *       hiveMetastoreConfig:
 *         version: 3.1.2
 *       networkConfig:
 *         consumers:
 *           - subnetwork: ${subnet.id}
 * ```
 * 
 * ### Dataproc Metastore Service Private Service Connect Custom Routes
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const net = new gcp.compute.Network("net", {
 *     name: "my-network",
 *     autoCreateSubnetworks: false,
 * });
 * const subnet = new gcp.compute.Subnetwork("subnet", {
 *     name: "my-subnetwork",
 *     region: "us-central1",
 *     network: net.id,
 *     ipCidrRange: "10.0.0.0/22",
 *     privateIpGoogleAccess: true,
 * });
 * const _default = new gcp.dataproc.MetastoreService("default", {
 *     serviceId: "metastore-srv",
 *     location: "us-central1",
 *     hiveMetastoreConfig: {
 *         version: "3.1.2",
 *     },
 *     networkConfig: {
 *         consumers: [{
 *             subnetwork: subnet.id,
 *         }],
 *         customRoutesEnabled: true,
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * net = gcp.compute.Network("net",
 *     name="my-network",
 *     auto_create_subnetworks=False)
 * subnet = gcp.compute.Subnetwork("subnet",
 *     name="my-subnetwork",
 *     region="us-central1",
 *     network=net.id,
 *     ip_cidr_range="10.0.0.0/22",
 *     private_ip_google_access=True)
 * default = gcp.dataproc.MetastoreService("default",
 *     service_id="metastore-srv",
 *     location="us-central1",
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="3.1.2",
 *     ),
 *     network_config=gcp.dataproc.MetastoreServiceNetworkConfigArgs(
 *         consumers=[gcp.dataproc.MetastoreServiceNetworkConfigConsumerArgs(
 *             subnetwork=subnet.id,
 *         )],
 *         custom_routes_enabled=True,
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var net = new Gcp.Compute.Network("net", new()
 *     {
 *         Name = "my-network",
 *         AutoCreateSubnetworks = false,
 *     });
 *     var subnet = new Gcp.Compute.Subnetwork("subnet", new()
 *     {
 *         Name = "my-subnetwork",
 *         Region = "us-central1",
 *         Network = net.Id,
 *         IpCidrRange = "10.0.0.0/22",
 *         PrivateIpGoogleAccess = true,
 *     });
 *     var @default = new Gcp.Dataproc.MetastoreService("default", new()
 *     {
 *         ServiceId = "metastore-srv",
 *         Location = "us-central1",
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "3.1.2",
 *         },
 *         NetworkConfig = new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigArgs
 *         {
 *             Consumers = new[]
 *             {
 *                 new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigConsumerArgs
 *                 {
 *                     Subnetwork = subnet.Id,
 *                 },
 *             },
 *             CustomRoutesEnabled = true,
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/compute"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		net, err := compute.NewNetwork(ctx, "net", &compute.NetworkArgs{
 * 			Name:                  pulumi.String("my-network"),
 * 			AutoCreateSubnetworks: pulumi.Bool(false),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		subnet, err := compute.NewSubnetwork(ctx, "subnet", &compute.SubnetworkArgs{
 * 			Name:                  pulumi.String("my-subnetwork"),
 * 			Region:                pulumi.String("us-central1"),
 * 			Network:               net.ID(),
 * 			IpCidrRange:           pulumi.String("10.0.0.0/22"),
 * 			PrivateIpGoogleAccess: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = dataproc.NewMetastoreService(ctx, "default", &dataproc.MetastoreServiceArgs{
 * 			ServiceId: pulumi.String("metastore-srv"),
 * 			Location:  pulumi.String("us-central1"),
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("3.1.2"),
 * 			},
 * 			NetworkConfig: &dataproc.MetastoreServiceNetworkConfigArgs{
 * 				Consumers: dataproc.MetastoreServiceNetworkConfigConsumerArray{
 * 					&dataproc.MetastoreServiceNetworkConfigConsumerArgs{
 * 						Subnetwork: subnet.ID(),
 * 					},
 * 				},
 * 				CustomRoutesEnabled: pulumi.Bool(true),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.compute.Network;
 * import com.pulumi.gcp.compute.NetworkArgs;
 * import com.pulumi.gcp.compute.Subnetwork;
 * import com.pulumi.gcp.compute.SubnetworkArgs;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var net = new Network("net", NetworkArgs.builder()
 *             .name("my-network")
 *             .autoCreateSubnetworks(false)
 *             .build());
 *         var subnet = new Subnetwork("subnet", SubnetworkArgs.builder()
 *             .name("my-subnetwork")
 *             .region("us-central1")
 *             .network(net.id())
 *             .ipCidrRange("10.0.0.0/22")
 *             .privateIpGoogleAccess(true)
 *             .build());
 *         var default_ = new MetastoreService("default", MetastoreServiceArgs.builder()
 *             .serviceId("metastore-srv")
 *             .location("us-central1")
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("3.1.2")
 *                 .build())
 *             .networkConfig(MetastoreServiceNetworkConfigArgs.builder()
 *                 .consumers(MetastoreServiceNetworkConfigConsumerArgs.builder()
 *                     .subnetwork(subnet.id())
 *                     .build())
 *                 .customRoutesEnabled(true)
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   net:
 *     type: gcp:compute:Network
 *     properties:
 *       name: my-network
 *       autoCreateSubnetworks: false
 *   subnet:
 *     type: gcp:compute:Subnetwork
 *     properties:
 *       name: my-subnetwork
 *       region: us-central1
 *       network: ${net.id}
 *       ipCidrRange: 10.0.0.0/22
 *       privateIpGoogleAccess: true
 *   default:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: metastore-srv
 *       location: us-central1
 *       hiveMetastoreConfig:
 *         version: 3.1.2
 *       networkConfig:
 *         consumers:
 *           - subnetwork: ${subnet.id}
 *         customRoutesEnabled: true
 * ```
 * 
 * ### Dataproc Metastore Service Dpms2
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const dpms2 = new gcp.dataproc.MetastoreService("dpms2", {
 *     serviceId: "ms-dpms2",
 *     location: "us-central1",
 *     databaseType: "SPANNER",
 *     hiveMetastoreConfig: {
 *         version: "3.1.2",
 *     },
 *     scalingConfig: {
 *         instanceSize: "EXTRA_SMALL",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * dpms2 = gcp.dataproc.MetastoreService("dpms2",
 *     service_id="ms-dpms2",
 *     location="us-central1",
 *     database_type="SPANNER",
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="3.1.2",
 *     ),
 *     scaling_config=gcp.dataproc.MetastoreServiceScalingConfigArgs(
 *         instance_size="EXTRA_SMALL",
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var dpms2 = new Gcp.Dataproc.MetastoreService("dpms2", new()
 *     {
 *         ServiceId = "ms-dpms2",
 *         Location = "us-central1",
 *         DatabaseType = "SPANNER",
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "3.1.2",
 *         },
 *         ScalingConfig = new Gcp.Dataproc.Inputs.MetastoreServiceScalingConfigArgs
 *         {
 *             InstanceSize = "EXTRA_SMALL",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewMetastoreService(ctx, "dpms2", &dataproc.MetastoreServiceArgs{
 * 			ServiceId:    pulumi.String("ms-dpms2"),
 * 			Location:     pulumi.String("us-central1"),
 * 			DatabaseType: pulumi.String("SPANNER"),
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("3.1.2"),
 * 			},
 * 			ScalingConfig: &dataproc.MetastoreServiceScalingConfigArgs{
 * 				InstanceSize: pulumi.String("EXTRA_SMALL"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceScalingConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var dpms2 = new MetastoreService("dpms2", MetastoreServiceArgs.builder()
 *             .serviceId("ms-dpms2")
 *             .location("us-central1")
 *             .databaseType("SPANNER")
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("3.1.2")
 *                 .build())
 *             .scalingConfig(MetastoreServiceScalingConfigArgs.builder()
 *                 .instanceSize("EXTRA_SMALL")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   dpms2:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: ms-dpms2
 *       location: us-central1
 *       databaseType: SPANNER
 *       hiveMetastoreConfig:
 *         version: 3.1.2
 *       scalingConfig:
 *         instanceSize: EXTRA_SMALL
 * ```
 * 
 * ### Dataproc Metastore Service Dpms2 Scaling Factor
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const dpms2ScalingFactor = new gcp.dataproc.MetastoreService("dpms2_scaling_factor", {
 *     serviceId: "ms-dpms2sf",
 *     location: "us-central1",
 *     databaseType: "SPANNER",
 *     hiveMetastoreConfig: {
 *         version: "3.1.2",
 *     },
 *     scalingConfig: {
 *         scalingFactor: 2,
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * dpms2_scaling_factor = gcp.dataproc.MetastoreService("dpms2_scaling_factor",
 *     service_id="ms-dpms2sf",
 *     location="us-central1",
 *     database_type="SPANNER",
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="3.1.2",
 *     ),
 *     scaling_config=gcp.dataproc.MetastoreServiceScalingConfigArgs(
 *         scaling_factor=2,
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var dpms2ScalingFactor = new Gcp.Dataproc.MetastoreService("dpms2_scaling_factor", new()
 *     {
 *         ServiceId = "ms-dpms2sf",
 *         Location = "us-central1",
 *         DatabaseType = "SPANNER",
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "3.1.2",
 *         },
 *         ScalingConfig = new Gcp.Dataproc.Inputs.MetastoreServiceScalingConfigArgs
 *         {
 *             ScalingFactor = 2,
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewMetastoreService(ctx, "dpms2_scaling_factor", &dataproc.MetastoreServiceArgs{
 * 			ServiceId:    pulumi.String("ms-dpms2sf"),
 * 			Location:     pulumi.String("us-central1"),
 * 			DatabaseType: pulumi.String("SPANNER"),
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("3.1.2"),
 * 			},
 * 			ScalingConfig: &dataproc.MetastoreServiceScalingConfigArgs{
 * 				ScalingFactor: pulumi.Float64(2),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceScalingConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var dpms2ScalingFactor = new MetastoreService("dpms2ScalingFactor", MetastoreServiceArgs.builder()
 *             .serviceId("ms-dpms2sf")
 *             .location("us-central1")
 *             .databaseType("SPANNER")
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("3.1.2")
 *                 .build())
 *             .scalingConfig(MetastoreServiceScalingConfigArgs.builder()
 *                 .scalingFactor("2")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   dpms2ScalingFactor:
 *     type: gcp:dataproc:MetastoreService
 *     name: dpms2_scaling_factor
 *     properties:
 *       serviceId: ms-dpms2sf
 *       location: us-central1
 *       databaseType: SPANNER
 *       hiveMetastoreConfig:
 *         version: 3.1.2
 *       scalingConfig:
 *         scalingFactor: '2'
 * ```
 * 
 * ### Dataproc Metastore Service Scheduled Backup
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const bucket = new gcp.storage.Bucket("bucket", {
 *     name: "backup",
 *     location: "us-central1",
 * });
 * const backup = new gcp.dataproc.MetastoreService("backup", {
 *     serviceId: "backup",
 *     location: "us-central1",
 *     port: 9080,
 *     tier: "DEVELOPER",
 *     maintenanceWindow: {
 *         hourOfDay: 2,
 *         dayOfWeek: "SUNDAY",
 *     },
 *     hiveMetastoreConfig: {
 *         version: "2.3.6",
 *     },
 *     scheduledBackup: {
 *         enabled: true,
 *         cronSchedule: "0 0 * * *",
 *         timeZone: "UTC",
 *         backupLocation: pulumi.interpolate`gs://${bucket.name}`,
 *     },
 *     labels: {
 *         env: "test",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * bucket = gcp.storage.Bucket("bucket",
 *     name="backup",
 *     location="us-central1")
 * backup = gcp.dataproc.MetastoreService("backup",
 *     service_id="backup",
 *     location="us-central1",
 *     port=9080,
 *     tier="DEVELOPER",
 *     maintenance_window=gcp.dataproc.MetastoreServiceMaintenanceWindowArgs(
 *         hour_of_day=2,
 *         day_of_week="SUNDAY",
 *     ),
 *     hive_metastore_config=gcp.dataproc.MetastoreServiceHiveMetastoreConfigArgs(
 *         version="2.3.6",
 *     ),
 *     scheduled_backup=gcp.dataproc.MetastoreServiceScheduledBackupArgs(
 *         enabled=True,
 *         cron_schedule="0 0 * * *",
 *         time_zone="UTC",
 *         backup_location=bucket.name.apply(lambda name: f"gs://{name}"),
 *     ),
 *     labels={
 *         "env": "test",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var bucket = new Gcp.Storage.Bucket("bucket", new()
 *     {
 *         Name = "backup",
 *         Location = "us-central1",
 *     });
 *     var backup = new Gcp.Dataproc.MetastoreService("backup", new()
 *     {
 *         ServiceId = "backup",
 *         Location = "us-central1",
 *         Port = 9080,
 *         Tier = "DEVELOPER",
 *         MaintenanceWindow = new Gcp.Dataproc.Inputs.MetastoreServiceMaintenanceWindowArgs
 *         {
 *             HourOfDay = 2,
 *             DayOfWeek = "SUNDAY",
 *         },
 *         HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
 *         {
 *             Version = "2.3.6",
 *         },
 *         ScheduledBackup = new Gcp.Dataproc.Inputs.MetastoreServiceScheduledBackupArgs
 *         {
 *             Enabled = true,
 *             CronSchedule = "0 0 * * *",
 *             TimeZone = "UTC",
 *             BackupLocation = bucket.Name.Apply(name => $"gs://{name}"),
 *         },
 *         Labels =
 *         {
 *             { "env", "test" },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
 * 			Name:     pulumi.String("backup"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = dataproc.NewMetastoreService(ctx, "backup", &dataproc.MetastoreServiceArgs{
 * 			ServiceId: pulumi.String("backup"),
 * 			Location:  pulumi.String("us-central1"),
 * 			Port:      pulumi.Int(9080),
 * 			Tier:      pulumi.String("DEVELOPER"),
 * 			MaintenanceWindow: &dataproc.MetastoreServiceMaintenanceWindowArgs{
 * 				HourOfDay: pulumi.Int(2),
 * 				DayOfWeek: pulumi.String("SUNDAY"),
 * 			},
 * 			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
 * 				Version: pulumi.String("2.3.6"),
 * 			},
 * 			ScheduledBackup: &dataproc.MetastoreServiceScheduledBackupArgs{
 * 				Enabled:      pulumi.Bool(true),
 * 				CronSchedule: pulumi.String("0 0 * * *"),
 * 				TimeZone:     pulumi.String("UTC"),
 * 				BackupLocation: bucket.Name.ApplyT(func(name string) (string, error) {
 * 					return fmt.Sprintf("gs://%v", name), nil
 * 				}).(pulumi.StringOutput),
 * 			},
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("test"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.storage.Bucket;
 * import com.pulumi.gcp.storage.BucketArgs;
 * import com.pulumi.gcp.dataproc.MetastoreService;
 * import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceMaintenanceWindowArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.MetastoreServiceScheduledBackupArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var bucket = new Bucket("bucket", BucketArgs.builder()
 *             .name("backup")
 *             .location("us-central1")
 *             .build());
 *         var backup = new MetastoreService("backup", MetastoreServiceArgs.builder()
 *             .serviceId("backup")
 *             .location("us-central1")
 *             .port(9080)
 *             .tier("DEVELOPER")
 *             .maintenanceWindow(MetastoreServiceMaintenanceWindowArgs.builder()
 *                 .hourOfDay(2)
 *                 .dayOfWeek("SUNDAY")
 *                 .build())
 *             .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
 *                 .version("2.3.6")
 *                 .build())
 *             .scheduledBackup(MetastoreServiceScheduledBackupArgs.builder()
 *                 .enabled(true)
 *                 .cronSchedule("0 0 * * *")
 *                 .timeZone("UTC")
 *                 .backupLocation(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                 .build())
 *             .labels(Map.of("env", "test"))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   backup:
 *     type: gcp:dataproc:MetastoreService
 *     properties:
 *       serviceId: backup
 *       location: us-central1
 *       port: 9080
 *       tier: DEVELOPER
 *       maintenanceWindow:
 *         hourOfDay: 2
 *         dayOfWeek: SUNDAY
 *       hiveMetastoreConfig:
 *         version: 2.3.6
 *       scheduledBackup:
 *         enabled: true
 *         cronSchedule: 0 0 * * *
 *         timeZone: UTC
 *         backupLocation: gs://${bucket.name}
 *       labels:
 *         env: test
 *   bucket:
 *     type: gcp:storage:Bucket
 *     properties:
 *       name: backup
 *       location: us-central1
 * ```
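 * In Kotlin, the derived `backupLocation` can be computed from another resource's output
 * with `Output.applyValue`, the same combinator this file uses in `toJava()`. A minimal
 * sketch, assuming `bucketName` is an `Output<String>` taken from a storage bucket
 * resource (not shown) and that `MetastoreServiceScheduledBackupArgs` exposes the fields
 * used below via its data-class constructor:
 * ```kotlin
 * import com.pulumi.core.Output
 * import com.pulumi.gcp.dataproc.kotlin.MetastoreServiceArgs
 * import com.pulumi.gcp.dataproc.kotlin.inputs.MetastoreServiceScheduledBackupArgs
 * fun scheduledBackupArgs(bucketName: Output<String>): MetastoreServiceArgs {
 *     // Prefix the bucket name with the gs:// scheme once its value is known.
 *     val backupLocation: Output<String> = bucketName.applyValue { name -> "gs://$name" }
 *     return MetastoreServiceArgs(
 *         serviceId = Output.of("backup"),
 *         location = Output.of("us-central1"),
 *         scheduledBackup = Output.of(
 *             MetastoreServiceScheduledBackupArgs(
 *                 enabled = Output.of(true),
 *                 cronSchedule = Output.of("0 0 * * *"),
 *                 timeZone = Output.of("UTC"),
 *                 backupLocation = backupLocation,
 *             ),
 *         ),
 *     )
 * }
 * ```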
 * 
 * ## Import
 * Service can be imported using any of these accepted formats:
 * * `projects/{{project}}/locations/{{location}}/services/{{service_id}}`
 * * `{{project}}/{{location}}/{{service_id}}`
 * * `{{location}}/{{service_id}}`
 * When using the `pulumi import` command, Service can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:dataproc/metastoreService:MetastoreService default projects/{{project}}/locations/{{location}}/services/{{service_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/metastoreService:MetastoreService default {{project}}/{{location}}/{{service_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/metastoreService:MetastoreService default {{location}}/{{service_id}}
 * ```
 * @property databaseType The database type that the Metastore service stores its data in.
 * Default value is `MYSQL`.
 * Possible values are: `MYSQL`, `SPANNER`.
 * @property encryptionConfig Information used to configure the Dataproc Metastore service to encrypt
 * customer data at rest.
 * Structure is documented below.
 * @property hiveMetastoreConfig Configuration information specific to running Hive metastore software as the metastore service.
 * Structure is documented below.
 * @property labels User-defined labels for the metastore service.
 * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
 * Please refer to the field `effective_labels` for all of the labels present on the resource.
 * @property location The location where the metastore service should reside.
 * The default value is `global`.
 * @property maintenanceWindow The one hour maintenance window of the metastore service.
 * This specifies when the service can be restarted for maintenance purposes in UTC time.
 * Maintenance window is not needed for services with the `SPANNER` database type.
 * Structure is documented below.
 * @property metadataIntegration The setting that defines how metastore metadata should be integrated with external services and systems.
 * Structure is documented below.
 * @property network The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form:
 * "projects/{projectNumber}/global/networks/{network_id}".
 * @property networkConfig The configuration specifying the network settings for the Dataproc Metastore service.
 * Structure is documented below.
 * @property port The TCP port at which the metastore service is reached. Default: 9083.
 * @property project The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 * @property releaseChannel The release channel of the service. If unspecified, defaults to `STABLE`.
 * Default value is `STABLE`.
 * Possible values are: `CANARY`, `STABLE`.
 * @property scalingConfig Represents the scaling configuration of a metastore service.
 * Structure is documented below.
 * @property scheduledBackup The configuration of scheduled backup for the metastore service.
 * Structure is documented below.
 * @property serviceId The ID of the metastore service. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_),
 * and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between
 * 3 and 63 characters.
 * - - -
 * @property telemetryConfig The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified, defaults to JSON.
 * Structure is documented below.
 * @property tier The tier of the service.
 * Possible values are: `DEVELOPER`, `ENTERPRISE`.
 */
public data class MetastoreServiceArgs(
    public val databaseType: Output<String>? = null,
    public val encryptionConfig: Output<MetastoreServiceEncryptionConfigArgs>? = null,
    public val hiveMetastoreConfig: Output<MetastoreServiceHiveMetastoreConfigArgs>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val location: Output<String>? = null,
    public val maintenanceWindow: Output<MetastoreServiceMaintenanceWindowArgs>? = null,
    public val metadataIntegration: Output<MetastoreServiceMetadataIntegrationArgs>? = null,
    public val network: Output<String>? = null,
    public val networkConfig: Output<MetastoreServiceNetworkConfigArgs>? = null,
    public val port: Output<Int>? = null,
    public val project: Output<String>? = null,
    public val releaseChannel: Output<String>? = null,
    public val scalingConfig: Output<MetastoreServiceScalingConfigArgs>? = null,
    public val scheduledBackup: Output<MetastoreServiceScheduledBackupArgs>? = null,
    public val serviceId: Output<String>? = null,
    public val telemetryConfig: Output<MetastoreServiceTelemetryConfigArgs>? = null,
    public val tier: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.MetastoreServiceArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.MetastoreServiceArgs =
        com.pulumi.gcp.dataproc.MetastoreServiceArgs.builder()
            .databaseType(databaseType?.applyValue({ args0 -> args0 }))
            .encryptionConfig(encryptionConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .hiveMetastoreConfig(
                hiveMetastoreConfig?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .labels(labels?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .location(location?.applyValue({ args0 -> args0 }))
            .maintenanceWindow(maintenanceWindow?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .metadataIntegration(
                metadataIntegration?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .network(network?.applyValue({ args0 -> args0 }))
            .networkConfig(networkConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .port(port?.applyValue({ args0 -> args0 }))
            .project(project?.applyValue({ args0 -> args0 }))
            .releaseChannel(releaseChannel?.applyValue({ args0 -> args0 }))
            .scalingConfig(scalingConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .scheduledBackup(scheduledBackup?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .serviceId(serviceId?.applyValue({ args0 -> args0 }))
            .telemetryConfig(telemetryConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .tier(tier?.applyValue({ args0 -> args0 })).build()
}
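
/*
 * `toJava()` bridges this Kotlin args type to the underlying Java builder consumed by the
 * provider. A minimal usage sketch (variable names are illustrative only):
 * ```kotlin
 * val kotlinArgs = MetastoreServiceArgs(
 *     serviceId = com.pulumi.core.Output.of("metastore-srv"),
 *     location = com.pulumi.core.Output.of("us-central1"),
 * )
 * // Yields the Java-side com.pulumi.gcp.dataproc.MetastoreServiceArgs instance.
 * val javaArgs = kotlinArgs.toJava()
 * ```
 */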

/**
 * Builder for [MetastoreServiceArgs].
 */
@PulumiTagMarker
public class MetastoreServiceArgsBuilder internal constructor() {
    private var databaseType: Output<String>? = null

    private var encryptionConfig: Output<MetastoreServiceEncryptionConfigArgs>? = null

    private var hiveMetastoreConfig: Output<MetastoreServiceHiveMetastoreConfigArgs>? = null

    private var labels: Output<Map<String, String>>? = null

    private var location: Output<String>? = null

    private var maintenanceWindow: Output<MetastoreServiceMaintenanceWindowArgs>? = null

    private var metadataIntegration: Output<MetastoreServiceMetadataIntegrationArgs>? = null

    private var network: Output<String>? = null

    private var networkConfig: Output<MetastoreServiceNetworkConfigArgs>? = null

    private var port: Output<Int>? = null

    private var project: Output<String>? = null

    private var releaseChannel: Output<String>? = null

    private var scalingConfig: Output<MetastoreServiceScalingConfigArgs>? = null

    private var scheduledBackup: Output<MetastoreServiceScheduledBackupArgs>? = null

    private var serviceId: Output<String>? = null

    private var telemetryConfig: Output<MetastoreServiceTelemetryConfigArgs>? = null

    private var tier: Output<String>? = null

    /**
     * @param value The database type that the Metastore service stores its data in.
     * Default value is `MYSQL`.
     * Possible values are: `MYSQL`, `SPANNER`.
     */
    @JvmName("pjlakujlcfpoixqc")
    public suspend fun databaseType(`value`: Output<String>) {
        this.databaseType = value
    }

    /**
     * @param value Information used to configure the Dataproc Metastore service to encrypt
     * customer data at rest.
     * Structure is documented below.
     */
    @JvmName("flwujghalqafftyr")
    public suspend fun encryptionConfig(`value`: Output<MetastoreServiceEncryptionConfigArgs>) {
        this.encryptionConfig = value
    }

    /**
     * @param value Configuration information specific to running Hive metastore software as the metastore service.
     * Structure is documented below.
     */
    @JvmName("qgycgggannmfjdih")
    public suspend fun hiveMetastoreConfig(`value`: Output<MetastoreServiceHiveMetastoreConfigArgs>) {
        this.hiveMetastoreConfig = value
    }

    /**
     * @param value User-defined labels for the metastore service.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("bdlcqowypkwqaheb")
    public suspend fun labels(`value`: Output<Map<String, String>>) {
        this.labels = value
    }

    /**
     * @param value The location where the metastore service should reside.
     * The default value is `global`.
     */
    @JvmName("qwjxiyruwxfqovnm")
    public suspend fun location(`value`: Output<String>) {
        this.location = value
    }

    /**
     * @param value The one hour maintenance window of the metastore service.
     * This specifies when the service can be restarted for maintenance purposes in UTC time.
     * Maintenance window is not needed for services with the `SPANNER` database type.
     * Structure is documented below.
     */
    @JvmName("upffffcnnhbymvqv")
    public suspend fun maintenanceWindow(`value`: Output<MetastoreServiceMaintenanceWindowArgs>) {
        this.maintenanceWindow = value
    }

    /**
     * @param value The setting that defines how metastore metadata should be integrated with external services and systems.
     * Structure is documented below.
     */
    @JvmName("nvubvtdtgockjjnh")
    public suspend fun metadataIntegration(`value`: Output<MetastoreServiceMetadataIntegrationArgs>) {
        this.metadataIntegration = value
    }

    /**
     * @param value The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form:
     * "projects/{projectNumber}/global/networks/{network_id}".
     */
    @JvmName("yoskaprsqvywgsvv")
    public suspend fun network(`value`: Output<String>) {
        this.network = value
    }

    /**
     * @param value The configuration specifying the network settings for the Dataproc Metastore service.
     * Structure is documented below.
     */
    @JvmName("aduwpydobtciqhwy")
    public suspend fun networkConfig(`value`: Output<MetastoreServiceNetworkConfigArgs>) {
        this.networkConfig = value
    }

    /**
     * @param value The TCP port at which the metastore service is reached. Default: 9083.
     */
    @JvmName("dclscerdfhggcpkb")
    public suspend fun port(`value`: Output<Int>) {
        this.port = value
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("jksbakvrpmjiblxi")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value The release channel of the service. If unspecified, defaults to `STABLE`.
     * Default value is `STABLE`.
     * Possible values are: `CANARY`, `STABLE`.
     */
    @JvmName("trystsokfpedsgyb")
    public suspend fun releaseChannel(`value`: Output<String>) {
        this.releaseChannel = value
    }

    /**
     * @param value Represents the scaling configuration of a metastore service.
     * Structure is documented below.
     */
    @JvmName("dbixxbcdhpadvdfg")
    public suspend fun scalingConfig(`value`: Output<MetastoreServiceScalingConfigArgs>) {
        this.scalingConfig = value
    }

    /**
     * @param value The configuration of scheduled backup for the metastore service.
     * Structure is documented below.
     */
    @JvmName("vwxwtkaxkgoyghdx")
    public suspend fun scheduledBackup(`value`: Output<MetastoreServiceScheduledBackupArgs>) {
        this.scheduledBackup = value
    }

    /**
     * @param value The ID of the metastore service. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_),
     * and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between
     * 3 and 63 characters.
     * - - -
     */
    @JvmName("ajvvwcpnwgnpcgdp")
    public suspend fun serviceId(`value`: Output<String>) {
        this.serviceId = value
    }

    /**
     * @param value The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified, defaults to JSON.
     * Structure is documented below.
     */
    @JvmName("guvtnsnevvgjeaxd")
    public suspend fun telemetryConfig(`value`: Output<MetastoreServiceTelemetryConfigArgs>) {
        this.telemetryConfig = value
    }

    /**
     * @param value The tier of the service.
     * Possible values are: `DEVELOPER`, `ENTERPRISE`.
     */
    @JvmName("pxawrgsplyprbuwb")
    public suspend fun tier(`value`: Output<String>) {
        this.tier = value
    }

    /**
     * @param value The database type that the Metastore service stores its data in.
     * Default value is `MYSQL`.
     * Possible values are: `MYSQL`, `SPANNER`.
     */
    @JvmName("fglwmpboreymywvu")
    public suspend fun databaseType(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.databaseType = mapped
    }

    /**
     * @param value Information used to configure the Dataproc Metastore service to encrypt
     * customer data at rest.
     * Structure is documented below.
     */
    @JvmName("cwxjfjoswtipnnfi")
    public suspend fun encryptionConfig(`value`: MetastoreServiceEncryptionConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.encryptionConfig = mapped
    }

    /**
     * @param argument Information used to configure the Dataproc Metastore service to encrypt
     * customer data at rest.
     * Structure is documented below.
     */
    @JvmName("tmwolqexatywqgyb")
    public suspend fun encryptionConfig(argument: suspend MetastoreServiceEncryptionConfigArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceEncryptionConfigArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.encryptionConfig = mapped
    }
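
    /*
     * Illustrative sketch of the type-safe builder overload above, using the same assumed
     * `MetastoreServiceArgsBuilder` receiver as in the earlier sketch. The nested `kmsKey`
     * method name is an assumption mirroring the encryption_config.kms_key API field.
     * ```kotlin
     * suspend fun enableCmek(args: MetastoreServiceArgsBuilder) {
     *     args.encryptionConfig {
     *         // Customer-managed encryption key used to encrypt metadata at rest (name assumed).
     *         kmsKey("projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key")
     *     }
     * }
     * ```
     */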

    /**
     * @param value Configuration information specific to running Hive metastore software as the metastore service.
     * Structure is documented below.
     */
    @JvmName("njojmorqtgsvdkkc")
    public suspend fun hiveMetastoreConfig(`value`: MetastoreServiceHiveMetastoreConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.hiveMetastoreConfig = mapped
    }

    /**
     * @param argument Configuration information specific to running Hive metastore software as the metastore service.
     * Structure is documented below.
     */
    @JvmName("gvungoednwkvebgj")
    public suspend fun hiveMetastoreConfig(argument: suspend MetastoreServiceHiveMetastoreConfigArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceHiveMetastoreConfigArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.hiveMetastoreConfig = mapped
    }
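
    /*
     * Illustrative sketch of the Hive config DSL overload, same assumed receiver as above.
     * The nested `version` method name is an assumption mirroring hive_metastore_config.version.
     * ```kotlin
     * suspend fun configureHive(args: MetastoreServiceArgsBuilder) {
     *     args.hiveMetastoreConfig {
     *         // Hive metastore software version served by this service (method name assumed).
     *         version("3.1.2")
     *     }
     * }
     * ```
     */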

    /**
     * @param value User-defined labels for the metastore service.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("aybdwuylsguwbudq")
    public suspend fun labels(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param values User-defined labels for the metastore service.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("pwtvbxgfhrifengb")
    public fun labels(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.labels = mapped
    }
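
    /*
     * Illustrative sketch showing the two `labels` overloads above side by side, with the same
     * assumed `MetastoreServiceArgsBuilder` receiver as in the earlier sketches.
     * ```kotlin
     * suspend fun tagService(args: MetastoreServiceArgsBuilder) {
     *     // Map overload (suspend):
     *     args.labels(mapOf("env" to "test", "team" to "data-platform"))
     *     // Equivalent vararg Pair overload (non-suspend):
     *     args.labels("env" to "test", "team" to "data-platform")
     * }
     * ```
     */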

    /**
     * @param value The location where the metastore service should reside.
     * The default value is `global`.
     */
    @JvmName("ruyqrdbudfbxhfnk")
    public suspend fun location(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.location = mapped
    }

    /**
     * @param value The one-hour maintenance window of the metastore service.
     * This specifies when the service can be restarted for maintenance purposes in UTC time.
     * Maintenance window is not needed for services with the `SPANNER` database type.
     * Structure is documented below.
     */
    @JvmName("omenpmyopiemtjlo")
    public suspend fun maintenanceWindow(`value`: MetastoreServiceMaintenanceWindowArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.maintenanceWindow = mapped
    }

    /**
     * @param argument The one-hour maintenance window of the metastore service.
     * This specifies when the service can be restarted for maintenance purposes in UTC time.
     * Maintenance window is not needed for services with the `SPANNER` database type.
     * Structure is documented below.
     */
    @JvmName("xsgoficuqpwrgrjk")
    public suspend fun maintenanceWindow(argument: suspend MetastoreServiceMaintenanceWindowArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceMaintenanceWindowArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.maintenanceWindow = mapped
    }
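
    /*
     * Illustrative sketch of the maintenance-window DSL overload, same assumed receiver as
     * above. The nested `hourOfDay` and `dayOfWeek` method names are assumptions mirroring the
     * maintenance_window.hour_of_day and maintenance_window.day_of_week API fields.
     * ```kotlin
     * suspend fun sundayMaintenance(args: MetastoreServiceArgsBuilder) {
     *     args.maintenanceWindow {
     *         hourOfDay(2)          // restart window starts at 02:00 UTC (method name assumed)
     *         dayOfWeek("SUNDAY")   // restart window day (method name assumed)
     *     }
     * }
     * ```
     */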

    /**
     * @param value The setting that defines how metastore metadata should be integrated with external services and systems.
     * Structure is documented below.
     */
    @JvmName("dobgrrjvnkkkausp")
    public suspend fun metadataIntegration(`value`: MetastoreServiceMetadataIntegrationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.metadataIntegration = mapped
    }

    /**
     * @param argument The setting that defines how metastore metadata should be integrated with external services and systems.
     * Structure is documented below.
     */
    @JvmName("eguxkypssyhaqdld")
    public suspend fun metadataIntegration(argument: suspend MetastoreServiceMetadataIntegrationArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceMetadataIntegrationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.metadataIntegration = mapped
    }

    /**
     * @param value The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form:
     * "projects/{projectNumber}/global/networks/{network_id}".
     */
    @JvmName("ogaqwqpyivgvenye")
    public suspend fun network(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.network = mapped
    }
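
    /*
     * Illustrative sketch of the `network` setter above, same assumed receiver as in the
     * earlier sketches. The value must be the relative resource name of the VPC network,
     * not its short name.
     * ```kotlin
     * suspend fun attachToVpc(args: MetastoreServiceArgsBuilder) {
     *     // "projects/{projectNumber}/global/networks/{network_id}" form, per the docs above.
     *     args.network("projects/1234567890/global/networks/my-vpc")
     * }
     * ```
     */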

    /**
     * @param value The configuration specifying the network settings for the Dataproc Metastore service.
     * Structure is documented below.
     */
    @JvmName("ocrcvcvbuvorsshd")
    public suspend fun networkConfig(`value`: MetastoreServiceNetworkConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.networkConfig = mapped
    }

    /**
     * @param argument The configuration specifying the network settings for the Dataproc Metastore service.
     * Structure is documented below.
     */
    @JvmName("akyuikptbkiibxgc")
    public suspend fun networkConfig(argument: suspend MetastoreServiceNetworkConfigArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceNetworkConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.networkConfig = mapped
    }

    /**
     * @param value The TCP port at which the metastore service is reached. Default: 9083.
     */
    @JvmName("cyjblmdwpgiwqhav")
    public suspend fun port(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.port = mapped
    }
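
    /*
     * Illustrative sketch of the two `port` overloads, same assumed receiver as above. The
     * `of` call refers to com.pulumi.core.Output.of, which is already imported by this file.
     * ```kotlin
     * suspend fun overrideThriftPort(args: MetastoreServiceArgsBuilder) {
     *     args.port(9083)        // plain Int overload (9083 is the documented default)
     *     args.port(of(9083))    // Output<Int> overload, wrapping the value with Output.of
     * }
     * ```
     */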

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("gjwmjuecuudrtqsy")
    public suspend fun project(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.project = mapped
    }

    /**
     * @param value The release channel of the service. If unspecified, defaults to `STABLE`.
     * Default value is `STABLE`.
     * Possible values are: `CANARY`, `STABLE`.
     */
    @JvmName("fdpdbkurvdffkhpw")
    public suspend fun releaseChannel(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.releaseChannel = mapped
    }

    /**
     * @param value Represents the scaling configuration of a metastore service.
     * Structure is documented below.
     */
    @JvmName("ttwyukjrktmpuuvr")
    public suspend fun scalingConfig(`value`: MetastoreServiceScalingConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scalingConfig = mapped
    }

    /**
     * @param argument Represents the scaling configuration of a metastore service.
     * Structure is documented below.
     */
    @JvmName("qlveuvybvcifnioi")
    public suspend fun scalingConfig(argument: suspend MetastoreServiceScalingConfigArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceScalingConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.scalingConfig = mapped
    }
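
    /*
     * Illustrative sketch of the scaling-config DSL overload, same assumed receiver as above.
     * The nested `scalingFactor` method name is an assumption mirroring the
     * scaling_config.scaling_factor API field.
     * ```kotlin
     * suspend fun scaleUp(args: MetastoreServiceArgsBuilder) {
     *     args.scalingConfig {
     *         // Relative scaling factor for the service (method name assumed).
     *         scalingFactor(2.0)
     *     }
     * }
     * ```
     */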

    /**
     * @param value The configuration of scheduled backup for the metastore service.
     * Structure is documented below.
     */
    @JvmName("xudvinrygiixqqjr")
    public suspend fun scheduledBackup(`value`: MetastoreServiceScheduledBackupArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scheduledBackup = mapped
    }

    /**
     * @param argument The configuration of scheduled backup for the metastore service.
     * Structure is documented below.
     */
    @JvmName("iymvruftaowobota")
    public suspend fun scheduledBackup(argument: suspend MetastoreServiceScheduledBackupArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceScheduledBackupArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.scheduledBackup = mapped
    }
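
    /*
     * Illustrative sketch of the scheduled-backup DSL overload, same assumed receiver as
     * above. The nested method names are assumptions mirroring the scheduled_backup.enabled,
     * scheduled_backup.cron_schedule and scheduled_backup.backup_location API fields.
     * ```kotlin
     * suspend fun nightlyBackups(args: MetastoreServiceArgsBuilder) {
     *     args.scheduledBackup {
     *         enabled(true)                                  // turn scheduled backups on
     *         cronSchedule("0 2 * * *")                      // every day at 02:00
     *         backupLocation("gs://my-backup-bucket/metastore")  // Cloud Storage destination
     *     }
     * }
     * ```
     */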

    /**
     * @param value The ID of the metastore service. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_),
     * and hyphens (-); it cannot begin or end with an underscore or hyphen, and must be between
     * 3 and 63 characters long.
     * - - -
     */
    @JvmName("ncoexwgiehjngnra")
    public suspend fun serviceId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.serviceId = mapped
    }
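
    /*
     * Illustrative sketch of the `serviceId` setter above, same assumed receiver as in the
     * earlier sketches. The regex and helper are hypothetical, not part of the SDK; they only
     * encode the documented naming rules (letters, digits, underscores, hyphens; no leading or
     * trailing underscore/hyphen; 3-63 characters).
     * ```kotlin
     * // Hypothetical validation helper for the documented service ID rules.
     * val SERVICE_ID_RULE = Regex("^[a-zA-Z0-9][a-zA-Z0-9_-]{1,61}[a-zA-Z0-9]$")
     *
     * suspend fun nameService(args: MetastoreServiceArgsBuilder) {
     *     val id = "metastore-srv"
     *     require(SERVICE_ID_RULE.matches(id)) { "invalid service ID: $id" }
     *     args.serviceId(id)
     * }
     * ```
     */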

    /**
     * @param value The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified, defaults to `JSON`.
     * Structure is documented below.
     */
    @JvmName("svavjbrsylbcpgxe")
    public suspend fun telemetryConfig(`value`: MetastoreServiceTelemetryConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.telemetryConfig = mapped
    }

    /**
     * @param argument The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified, defaults to `JSON`.
     * Structure is documented below.
     */
    @JvmName("wknovectldghrpuq")
    public suspend fun telemetryConfig(argument: suspend MetastoreServiceTelemetryConfigArgsBuilder.() -> Unit) {
        val toBeMapped = MetastoreServiceTelemetryConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.telemetryConfig = mapped
    }
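
    /*
     * Illustrative sketch of the telemetry-config DSL overload, same assumed receiver as
     * above. The nested `logFormat` method name is an assumption mirroring the
     * telemetry_config.log_format API field (JSON or LEGACY).
     * ```kotlin
     * suspend fun legacyLogs(args: MetastoreServiceArgsBuilder) {
     *     args.telemetryConfig {
     *         // Switch service logs from the default JSON format to LEGACY (method name assumed).
     *         logFormat("LEGACY")
     *     }
     * }
     * ```
     */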

    /**
     * @param value The tier of the service.
     * Possible values are: `DEVELOPER`, `ENTERPRISE`.
     */
    @JvmName("iklxegglbvsjburo")
    public suspend fun tier(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.tier = mapped
    }

    internal fun build(): MetastoreServiceArgs = MetastoreServiceArgs(
        databaseType = databaseType,
        encryptionConfig = encryptionConfig,
        hiveMetastoreConfig = hiveMetastoreConfig,
        labels = labels,
        location = location,
        maintenanceWindow = maintenanceWindow,
        metadataIntegration = metadataIntegration,
        network = network,
        networkConfig = networkConfig,
        port = port,
        project = project,
        releaseChannel = releaseChannel,
        scalingConfig = scalingConfig,
        scheduledBackup = scheduledBackup,
        serviceId = serviceId,
        telemetryConfig = telemetryConfig,
        tier = tier,
    )
}
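
/*
 * Composite sketch, under the same assumptions as the per-setter sketches above: composing
 * several setters on the assumed `MetastoreServiceArgsBuilder` receiver. Since `build()` is
 * internal, user code would normally reach this builder through the SDK's resource DSL rather
 * than calling it directly; the nested `version` method name is assumed, as noted earlier.
 * ```kotlin
 * suspend fun MetastoreServiceArgsBuilder.developerService() {
 *     serviceId("metastore-srv")
 *     location("us-central1")
 *     tier("DEVELOPER")
 *     port(9080)
 *     hiveMetastoreConfig { version("3.1.2") }   // nested method name assumed
 *     labels("env" to "test")                    // vararg Pair overload
 * }
 * ```
 */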



