@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs.builder
import com.pulumi.gcp.dataproc.kotlin.inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * An ApplicationEnvironment contains shared configuration that may be referenced by multiple SparkApplications.
 * To get more information about ApplicationEnvironment, see:
 * * [API documentation](https://cloud.google.com/dataproc-gdc/docs/reference/rest/v1/projects.locations.applicationEnvironments)
 * * How-to Guides
 *     * [Dataproc Intro](https://cloud.google.com/dataproc/)
 * ## Example Usage
 * ### Dataprocgdc Applicationenvironment Basic
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const application_environment = new gcp.dataproc.GdcApplicationEnvironment("application-environment", {
 *     applicationEnvironmentId: "dp-tf-e2e-application-environment-basic",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * application_environment = gcp.dataproc.GdcApplicationEnvironment("application-environment",
 *     application_environment_id="dp-tf-e2e-application-environment-basic",
 *     serviceinstance="do-not-delete-dataproc-gdc-instance",
 *     project="my-project",
 *     location="us-west2",
 *     namespace="default")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var application_environment = new Gcp.Dataproc.GdcApplicationEnvironment("application-environment", new()
 *     {
 *         ApplicationEnvironmentId = "dp-tf-e2e-application-environment-basic",
 *         Serviceinstance = "do-not-delete-dataproc-gdc-instance",
 *         Project = "my-project",
 *         Location = "us-west2",
 *         Namespace = "default",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewGdcApplicationEnvironment(ctx, "application-environment", &dataproc.GdcApplicationEnvironmentArgs{
 * 			ApplicationEnvironmentId: pulumi.String("dp-tf-e2e-application-environment-basic"),
 * 			Serviceinstance:          pulumi.String("do-not-delete-dataproc-gdc-instance"),
 * 			Project:                  pulumi.String("my-project"),
 * 			Location:                 pulumi.String("us-west2"),
 * 			Namespace:                pulumi.String("default"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
 * import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var application_environment = new GdcApplicationEnvironment("application-environment", GdcApplicationEnvironmentArgs.builder()
 *             .applicationEnvironmentId("dp-tf-e2e-application-environment-basic")
 *             .serviceinstance("do-not-delete-dataproc-gdc-instance")
 *             .project("my-project")
 *             .location("us-west2")
 *             .namespace("default")
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   application-environment:
 *     type: gcp:dataproc:GdcApplicationEnvironment
 *     properties:
 *       applicationEnvironmentId: dp-tf-e2e-application-environment-basic
 *       serviceinstance: do-not-delete-dataproc-gdc-instance
 *       project: my-project
 *       location: us-west2
 *       namespace: default
 * ```
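 * A Kotlin sketch of the same configuration is shown below. The setters inside `args { }` are the ones defined on [GdcApplicationEnvironmentArgsBuilder] in this file; the top-level `gdcApplicationEnvironment` resource function and the `Pulumi.run` entry point are assumed to follow the usual generated Kotlin SDK pattern.
 * ```kotlin
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run {
 *         // Assumed resource function from this package; args { } drives GdcApplicationEnvironmentArgsBuilder.
 *         gdcApplicationEnvironment("application-environment") {
 *             args {
 *                 applicationEnvironmentId("dp-tf-e2e-application-environment-basic")
 *                 serviceinstance("do-not-delete-dataproc-gdc-instance")
 *                 project("my-project")
 *                 location("us-west2")
 *                 namespace("default")
 *             }
 *         }
 *     }
 * }
 * ```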
 * 
 * ### Dataprocgdc Applicationenvironment
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const application_environment = new gcp.dataproc.GdcApplicationEnvironment("application-environment", {
 *     applicationEnvironmentId: "dp-tf-e2e-application-environment",
 *     serviceinstance: "do-not-delete-dataproc-gdc-instance",
 *     project: "my-project",
 *     location: "us-west2",
 *     namespace: "default",
 *     displayName: "An application environment",
 *     labels: {
 *         "test-label": "label-value",
 *     },
 *     annotations: {
 *         an_annotation: "annotation_value",
 *     },
 *     sparkApplicationEnvironmentConfig: {
 *         defaultProperties: {
 *             "spark.executor.memory": "4g",
 *         },
 *         defaultVersion: "1.2",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * application_environment = gcp.dataproc.GdcApplicationEnvironment("application-environment",
 *     application_environment_id="dp-tf-e2e-application-environment",
 *     serviceinstance="do-not-delete-dataproc-gdc-instance",
 *     project="my-project",
 *     location="us-west2",
 *     namespace="default",
 *     display_name="An application environment",
 *     labels={
 *         "test-label": "label-value",
 *     },
 *     annotations={
 *         "an_annotation": "annotation_value",
 *     },
 *     spark_application_environment_config={
 *         "default_properties": {
 *             "spark.executor.memory": "4g",
 *         },
 *         "default_version": "1.2",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var application_environment = new Gcp.Dataproc.GdcApplicationEnvironment("application-environment", new()
 *     {
 *         ApplicationEnvironmentId = "dp-tf-e2e-application-environment",
 *         Serviceinstance = "do-not-delete-dataproc-gdc-instance",
 *         Project = "my-project",
 *         Location = "us-west2",
 *         Namespace = "default",
 *         DisplayName = "An application environment",
 *         Labels =
 *         {
 *             { "test-label", "label-value" },
 *         },
 *         Annotations =
 *         {
 *             { "an_annotation", "annotation_value" },
 *         },
 *         SparkApplicationEnvironmentConfig = new Gcp.Dataproc.Inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs
 *         {
 *             DefaultProperties =
 *             {
 *                 { "spark.executor.memory", "4g" },
 *             },
 *             DefaultVersion = "1.2",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewGdcApplicationEnvironment(ctx, "application-environment", &dataproc.GdcApplicationEnvironmentArgs{
 * 			ApplicationEnvironmentId: pulumi.String("dp-tf-e2e-application-environment"),
 * 			Serviceinstance:          pulumi.String("do-not-delete-dataproc-gdc-instance"),
 * 			Project:                  pulumi.String("my-project"),
 * 			Location:                 pulumi.String("us-west2"),
 * 			Namespace:                pulumi.String("default"),
 * 			DisplayName:              pulumi.String("An application environment"),
 * 			Labels: pulumi.StringMap{
 * 				"test-label": pulumi.String("label-value"),
 * 			},
 * 			Annotations: pulumi.StringMap{
 * 				"an_annotation": pulumi.String("annotation_value"),
 * 			},
 * 			SparkApplicationEnvironmentConfig: &dataproc.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs{
 * 				DefaultProperties: pulumi.StringMap{
 * 					"spark.executor.memory": pulumi.String("4g"),
 * 				},
 * 				DefaultVersion: pulumi.String("1.2"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
 * import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
 * import com.pulumi.gcp.dataproc.inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var application_environment = new GdcApplicationEnvironment("application-environment", GdcApplicationEnvironmentArgs.builder()
 *             .applicationEnvironmentId("dp-tf-e2e-application-environment")
 *             .serviceinstance("do-not-delete-dataproc-gdc-instance")
 *             .project("my-project")
 *             .location("us-west2")
 *             .namespace("default")
 *             .displayName("An application environment")
 *             .labels(Map.of("test-label", "label-value"))
 *             .annotations(Map.of("an_annotation", "annotation_value"))
 *             .sparkApplicationEnvironmentConfig(GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs.builder()
 *                 .defaultProperties(Map.of("spark.executor.memory", "4g"))
 *                 .defaultVersion("1.2")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   application-environment:
 *     type: gcp:dataproc:GdcApplicationEnvironment
 *     properties:
 *       applicationEnvironmentId: dp-tf-e2e-application-environment
 *       serviceinstance: do-not-delete-dataproc-gdc-instance
 *       project: my-project
 *       location: us-west2
 *       namespace: default
 *       displayName: An application environment
 *       labels:
 *         test-label: label-value
 *       annotations:
 *         an_annotation: annotation_value
 *       sparkApplicationEnvironmentConfig:
 *         defaultProperties:
 *           spark.executor.memory: 4g
 *         defaultVersion: '1.2'
 * ```
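 * A Kotlin sketch of the same configuration follows. It assumes the generated `gdcApplicationEnvironment` resource function and that the nested spark config builder exposes `defaultProperties` and `defaultVersion` setters mirroring the fields shown above; the remaining setters are defined on [GdcApplicationEnvironmentArgsBuilder] in this file.
 * ```kotlin
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run {
 *         gdcApplicationEnvironment("application-environment") {
 *             args {
 *                 applicationEnvironmentId("dp-tf-e2e-application-environment")
 *                 serviceinstance("do-not-delete-dataproc-gdc-instance")
 *                 project("my-project")
 *                 location("us-west2")
 *                 namespace("default")
 *                 displayName("An application environment")
 *                 labels("test-label" to "label-value")
 *                 annotations("an_annotation" to "annotation_value")
 *                 sparkApplicationEnvironmentConfig {
 *                     // Setter names assumed to mirror the documented config fields.
 *                     defaultProperties("spark.executor.memory" to "4g")
 *                     defaultVersion("1.2")
 *                 }
 *             }
 *         }
 *     }
 * }
 * ```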
 * 
 * ## Import
 * ApplicationEnvironment can be imported using any of these accepted formats:
 * * `projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/applicationEnvironments/{{application_environment_id}}`
 * * `{{project}}/{{location}}/{{serviceinstance}}/{{application_environment_id}}`
 * * `{{location}}/{{serviceinstance}}/{{application_environment_id}}`
 * When using the `pulumi import` command, ApplicationEnvironment can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/applicationEnvironments/{{application_environment_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default {{project}}/{{location}}/{{serviceinstance}}/{{application_environment_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default {{location}}/{{serviceinstance}}/{{application_environment_id}}
 * ```
 * @property annotations The annotations to associate with this application environment. Annotations may be used to store client information, but are not used by the server.
 * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
 * Please refer to the field `effective_annotations` for all of the annotations present on the resource.
 * @property applicationEnvironmentId The id of the application environment
 * @property displayName User-provided human-readable name to be used in user interfaces.
 * @property labels The labels to associate with this application environment. Labels may be used for filtering and billing tracking.
 * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
 * Please refer to the field `effective_labels` for all of the labels present on the resource.
 * @property location The location of the application environment
 * @property namespace The name of the namespace in which to create this ApplicationEnvironment. This namespace must already exist in the cluster
 * @property project The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 * @property serviceinstance The id of the service instance to which this application environment belongs.
 * - - -
 * @property sparkApplicationEnvironmentConfig Represents the SparkApplicationEnvironmentConfig.
 * Structure is documented below.
 */
public data class GdcApplicationEnvironmentArgs(
    public val annotations: Output<Map<String, String>>? = null,
    public val applicationEnvironmentId: Output<String>? = null,
    public val displayName: Output<String>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val location: Output<String>? = null,
    public val namespace: Output<String>? = null,
    public val project: Output<String>? = null,
    public val serviceinstance: Output<String>? = null,
    public val sparkApplicationEnvironmentConfig: Output<GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs =
        com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs.builder()
            .annotations(
                annotations?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .applicationEnvironmentId(applicationEnvironmentId?.applyValue({ args0 -> args0 }))
            .displayName(displayName?.applyValue({ args0 -> args0 }))
            .labels(labels?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .location(location?.applyValue({ args0 -> args0 }))
            .namespace(namespace?.applyValue({ args0 -> args0 }))
            .project(project?.applyValue({ args0 -> args0 }))
            .serviceinstance(serviceinstance?.applyValue({ args0 -> args0 }))
            .sparkApplicationEnvironmentConfig(
                sparkApplicationEnvironmentConfig?.applyValue({ args0 ->
                    args0.let({ args0 -> args0.toJava() })
                }),
            ).build()
}

/**
 * Builder for [GdcApplicationEnvironmentArgs].
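 *
 * The builder mirrors the [GdcApplicationEnvironmentArgs] data class above, which can also be constructed directly. A minimal sketch using only declarations from this file:
 * ```kotlin
 * import com.pulumi.core.Output
 * import com.pulumi.gcp.dataproc.kotlin.GdcApplicationEnvironmentArgs
 * // All parameters are optional Outputs; fields left unset stay null.
 * val args = GdcApplicationEnvironmentArgs(
 *     applicationEnvironmentId = Output.of("dp-tf-e2e-application-environment-basic"),
 *     serviceinstance = Output.of("do-not-delete-dataproc-gdc-instance"),
 *     location = Output.of("us-west2"),
 *     namespace = Output.of("default"),
 * )
 * ```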
 */
@PulumiTagMarker
public class GdcApplicationEnvironmentArgsBuilder internal constructor() {
    private var annotations: Output<Map<String, String>>? = null

    private var applicationEnvironmentId: Output<String>? = null

    private var displayName: Output<String>? = null

    private var labels: Output<Map<String, String>>? = null

    private var location: Output<String>? = null

    private var namespace: Output<String>? = null

    private var project: Output<String>? = null

    private var serviceinstance: Output<String>? = null

    private var sparkApplicationEnvironmentConfig:
        Output<GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs>? = null

    /**
     * @param value The annotations to associate with this application environment. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effective_annotations` for all of the annotations present on the resource.
     */
    @JvmName("udhafqgdadimpuuu")
    public suspend fun annotations(`value`: Output<Map<String, String>>) {
        this.annotations = value
    }

    /**
     * @param value The id of the application environment
     */
    @JvmName("ufljuersatxxtmep")
    public suspend fun applicationEnvironmentId(`value`: Output<String>) {
        this.applicationEnvironmentId = value
    }

    /**
     * @param value User-provided human-readable name to be used in user interfaces.
     */
    @JvmName("lmtkfvhambrbfbxm")
    public suspend fun displayName(`value`: Output<String>) {
        this.displayName = value
    }

    /**
     * @param value The labels to associate with this application environment. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("tlqixhwnwxdrslqp")
    public suspend fun labels(`value`: Output<Map<String, String>>) {
        this.labels = value
    }

    /**
     * @param value The location of the application environment
     */
    @JvmName("napcfepwkvjgwvah")
    public suspend fun location(`value`: Output<String>) {
        this.location = value
    }

    /**
     * @param value The name of the namespace in which to create this ApplicationEnvironment. This namespace must already exist in the cluster
     */
    @JvmName("tqmnophufuxcjgmt")
    public suspend fun namespace(`value`: Output<String>) {
        this.namespace = value
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("ycsyalttstxqvmvg")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value The id of the service instance to which this application environment belongs.
     * - - -
     */
    @JvmName("uhmqspybjexnhohf")
    public suspend fun serviceinstance(`value`: Output<String>) {
        this.serviceinstance = value
    }

    /**
     * @param value Represents the SparkApplicationEnvironmentConfig.
     * Structure is documented below.
     */
    @JvmName("rpnlhtlcmjcynkwy")
    public suspend fun sparkApplicationEnvironmentConfig(`value`: Output<GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs>) {
        this.sparkApplicationEnvironmentConfig = value
    }

    /**
     * @param value The annotations to associate with this application environment. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effective_annotations` for all of the annotations present on the resource.
     */
    @JvmName("rrhxddjaplisadwv")
    public suspend fun annotations(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values The annotations to associate with this application environment. Annotations may be used to store client information, but are not used by the server.
     * **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
     * Please refer to the field `effective_annotations` for all of the annotations present on the resource.
     */
    @JvmName("xxbupisgwasbrwpv")
    public fun annotations(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value The id of the application environment
     */
    @JvmName("drvgdylchnylevan")
    public suspend fun applicationEnvironmentId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.applicationEnvironmentId = mapped
    }

    /**
     * @param value User-provided human-readable name to be used in user interfaces.
     */
    @JvmName("oftycxioljkodoon")
    public suspend fun displayName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.displayName = mapped
    }

    /**
     * @param value The labels to associate with this application environment. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("ocjixylqcwnscaff")
    public suspend fun labels(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param values The labels to associate with this application environment. Labels may be used for filtering and billing tracking.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("itikkqyaeeelgfxw")
    public fun labels(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param value The location of the application environment
     */
    @JvmName("xtgdgehiirmuygbb")
    public suspend fun location(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.location = mapped
    }

    /**
     * @param value The name of the namespace in which to create this ApplicationEnvironment. This namespace must already exist in the cluster
     */
    @JvmName("jtbxapwirbyxngok")
    public suspend fun namespace(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.namespace = mapped
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("tarspunwgkntstmb")
    public suspend fun project(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.project = mapped
    }

    /**
     * @param value The id of the service instance to which this application environment belongs.
     * - - -
     */
    @JvmName("ucefmnmhrgipbhmy")
    public suspend fun serviceinstance(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.serviceinstance = mapped
    }

    /**
     * @param value Represents the SparkApplicationEnvironmentConfig.
     * Structure is documented below.
     */
    @JvmName("nsmrmlnhwcsuyocu")
    public suspend fun sparkApplicationEnvironmentConfig(`value`: GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkApplicationEnvironmentConfig = mapped
    }

    /**
     * @param argument Represents the SparkApplicationEnvironmentConfig.
     * Structure is documented below.
     */
    @JvmName("yjjqxtkmbnkimtuw")
    public suspend fun sparkApplicationEnvironmentConfig(argument: suspend GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgsBuilder.() -> Unit) {
        val toBeMapped =
            GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgsBuilder().applySuspend {
                argument()
            }.build()
        val mapped = of(toBeMapped)
        this.sparkApplicationEnvironmentConfig = mapped
    }

    internal fun build(): GdcApplicationEnvironmentArgs = GdcApplicationEnvironmentArgs(
        annotations = annotations,
        applicationEnvironmentId = applicationEnvironmentId,
        displayName = displayName,
        labels = labels,
        location = location,
        namespace = namespace,
        project = project,
        serviceinstance = serviceinstance,
        sparkApplicationEnvironmentConfig = sparkApplicationEnvironmentConfig,
    )
}



