All downloads are free. Search and download functionality uses the official Maven repository.

com.pulumi.azure.datafactory.kotlin.LinkedServiceAzureDatabricksArgs.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 6.15.0.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.datafactory.kotlin

import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs.builder
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksInstancePoolArgs
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksInstancePoolArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksKeyVaultPasswordArgs
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksKeyVaultPasswordArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
import com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Manages a Linked Service (connection) between Azure Databricks and Azure Data Factory.
 * ## Example Usage
 * ### With Managed Identity & New Cluster
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example",
 *     location: "East US",
 * });
 * //Create a Linked Service using managed identity and new cluster config
 * const exampleFactory = new azure.datafactory.Factory("example", {
 *     name: "TestDtaFactory92783401247",
 *     location: example.location,
 *     resourceGroupName: example.name,
 *     identity: {
 *         type: "SystemAssigned",
 *     },
 * });
 * //Create a databricks instance
 * const exampleWorkspace = new azure.databricks.Workspace("example", {
 *     name: "databricks-test",
 *     resourceGroupName: example.name,
 *     location: example.location,
 *     sku: "standard",
 * });
 * const msiLinked = new azure.datafactory.LinkedServiceAzureDatabricks("msi_linked", {
 *     name: "ADBLinkedServiceViaMSI",
 *     dataFactoryId: exampleFactory.id,
 *     description: "ADB Linked Service via MSI",
 *     adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
 *     msiWorkSpaceResourceId: exampleWorkspace.id,
 *     newClusterConfig: {
 *         nodeType: "Standard_NC12",
 *         clusterVersion: "5.5.x-gpu-scala2.11",
 *         minNumberOfWorkers: 1,
 *         maxNumberOfWorkers: 5,
 *         driverNodeType: "Standard_NC12",
 *         logDestination: "dbfs:/logs",
 *         customTags: {
 *             custom_tag1: "sct_value_1",
 *             custom_tag2: "sct_value_2",
 *         },
 *         sparkConfig: {
 *             config1: "value1",
 *             config2: "value2",
 *         },
 *         sparkEnvironmentVariables: {
 *             envVar1: "value1",
 *             envVar2: "value2",
 *         },
 *         initScripts: [
 *             "init.sh",
 *             "init2.sh",
 *         ],
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example = azure.core.ResourceGroup("example",
 *     name="example",
 *     location="East US")
 * #Create a Linked Service using managed identity and new cluster config
 * example_factory = azure.datafactory.Factory("example",
 *     name="TestDtaFactory92783401247",
 *     location=example.location,
 *     resource_group_name=example.name,
 *     identity={
 *         "type": "SystemAssigned",
 *     })
 * #Create a databricks instance
 * example_workspace = azure.databricks.Workspace("example",
 *     name="databricks-test",
 *     resource_group_name=example.name,
 *     location=example.location,
 *     sku="standard")
 * msi_linked = azure.datafactory.LinkedServiceAzureDatabricks("msi_linked",
 *     name="ADBLinkedServiceViaMSI",
 *     data_factory_id=example_factory.id,
 *     description="ADB Linked Service via MSI",
 *     adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"),
 *     msi_work_space_resource_id=example_workspace.id,
 *     new_cluster_config={
 *         "node_type": "Standard_NC12",
 *         "cluster_version": "5.5.x-gpu-scala2.11",
 *         "min_number_of_workers": 1,
 *         "max_number_of_workers": 5,
 *         "driver_node_type": "Standard_NC12",
 *         "log_destination": "dbfs:/logs",
 *         "custom_tags": {
 *             "custom_tag1": "sct_value_1",
 *             "custom_tag2": "sct_value_2",
 *         },
 *         "spark_config": {
 *             "config1": "value1",
 *             "config2": "value2",
 *         },
 *         "spark_environment_variables": {
 *             "env_var1": "value1",
 *             "env_var2": "value2",
 *         },
 *         "init_scripts": [
 *             "init.sh",
 *             "init2.sh",
 *         ],
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "example",
 *         Location = "East US",
 *     });
 *     //Create a Linked Service using managed identity and new cluster config
 *     var exampleFactory = new Azure.DataFactory.Factory("example", new()
 *     {
 *         Name = "TestDtaFactory92783401247",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *         Identity = new Azure.DataFactory.Inputs.FactoryIdentityArgs
 *         {
 *             Type = "SystemAssigned",
 *         },
 *     });
 *     //Create a databricks instance
 *     var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
 *     {
 *         Name = "databricks-test",
 *         ResourceGroupName = example.Name,
 *         Location = example.Location,
 *         Sku = "standard",
 *     });
 *     var msiLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("msi_linked", new()
 *     {
 *         Name = "ADBLinkedServiceViaMSI",
 *         DataFactoryId = exampleFactory.Id,
 *         Description = "ADB Linked Service via MSI",
 *         AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
 *         MsiWorkSpaceResourceId = exampleWorkspace.Id,
 *         NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
 *         {
 *             NodeType = "Standard_NC12",
 *             ClusterVersion = "5.5.x-gpu-scala2.11",
 *             MinNumberOfWorkers = 1,
 *             MaxNumberOfWorkers = 5,
 *             DriverNodeType = "Standard_NC12",
 *             LogDestination = "dbfs:/logs",
 *             CustomTags =
 *             {
 *                 { "custom_tag1", "sct_value_1" },
 *                 { "custom_tag2", "sct_value_2" },
 *             },
 *             SparkConfig =
 *             {
 *                 { "config1", "value1" },
 *                 { "config2", "value2" },
 *             },
 *             SparkEnvironmentVariables =
 *             {
 *                 { "envVar1", "value1" },
 *                 { "envVar2", "value2" },
 *             },
 *             InitScripts = new[]
 *             {
 *                 "init.sh",
 *                 "init2.sh",
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/databricks"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("example"),
 * 			Location: pulumi.String("East US"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		// Create a Linked Service using managed identity and new cluster config
 * 		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
 * 			Name:              pulumi.String("TestDtaFactory92783401247"),
 * 			Location:          example.Location,
 * 			ResourceGroupName: example.Name,
 * 			Identity: &datafactory.FactoryIdentityArgs{
 * 				Type: pulumi.String("SystemAssigned"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		// Create a databricks instance
 * 		exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
 * 			Name:              pulumi.String("databricks-test"),
 * 			ResourceGroupName: example.Name,
 * 			Location:          example.Location,
 * 			Sku:               pulumi.String("standard"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "msi_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
 * 			Name:          pulumi.String("ADBLinkedServiceViaMSI"),
 * 			DataFactoryId: exampleFactory.ID(),
 * 			Description:   pulumi.String("ADB Linked Service via MSI"),
 * 			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
 * 				return fmt.Sprintf("https://%v", workspaceUrl), nil
 * 			}).(pulumi.StringOutput),
 * 			MsiWorkSpaceResourceId: exampleWorkspace.ID(),
 * 			NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
 * 				NodeType:           pulumi.String("Standard_NC12"),
 * 				ClusterVersion:     pulumi.String("5.5.x-gpu-scala2.11"),
 * 				MinNumberOfWorkers: pulumi.Int(1),
 * 				MaxNumberOfWorkers: pulumi.Int(5),
 * 				DriverNodeType:     pulumi.String("Standard_NC12"),
 * 				LogDestination:     pulumi.String("dbfs:/logs"),
 * 				CustomTags: pulumi.StringMap{
 * 					"custom_tag1": pulumi.String("sct_value_1"),
 * 					"custom_tag2": pulumi.String("sct_value_2"),
 * 				},
 * 				SparkConfig: pulumi.StringMap{
 * 					"config1": pulumi.String("value1"),
 * 					"config2": pulumi.String("value2"),
 * 				},
 * 				SparkEnvironmentVariables: pulumi.StringMap{
 * 					"envVar1": pulumi.String("value1"),
 * 					"envVar2": pulumi.String("value2"),
 * 				},
 * 				InitScripts: pulumi.StringArray{
 * 					pulumi.String("init.sh"),
 * 					pulumi.String("init2.sh"),
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.datafactory.inputs.FactoryIdentityArgs;
 * import com.pulumi.azure.databricks.Workspace;
 * import com.pulumi.azure.databricks.WorkspaceArgs;
 * import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
 * import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
 * import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example")
 *             .location("East US")
 *             .build());
 *         //Create a Linked Service using managed identity and new cluster config
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("TestDtaFactory92783401247")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .identity(FactoryIdentityArgs.builder()
 *                 .type("SystemAssigned")
 *                 .build())
 *             .build());
 *         //Create a databricks instance
 *         var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
 *             .name("databricks-test")
 *             .resourceGroupName(example.name())
 *             .location(example.location())
 *             .sku("standard")
 *             .build());
 *         var msiLinked = new LinkedServiceAzureDatabricks("msiLinked", LinkedServiceAzureDatabricksArgs.builder()
 *             .name("ADBLinkedServiceViaMSI")
 *             .dataFactoryId(exampleFactory.id())
 *             .description("ADB Linked Service via MSI")
 *             .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
 *             .msiWorkSpaceResourceId(exampleWorkspace.id())
 *             .newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
 *                 .nodeType("Standard_NC12")
 *                 .clusterVersion("5.5.x-gpu-scala2.11")
 *                 .minNumberOfWorkers(1)
 *                 .maxNumberOfWorkers(5)
 *                 .driverNodeType("Standard_NC12")
 *                 .logDestination("dbfs:/logs")
 *                 .customTags(Map.ofEntries(
 *                     Map.entry("custom_tag1", "sct_value_1"),
 *                     Map.entry("custom_tag2", "sct_value_2")
 *                 ))
 *                 .sparkConfig(Map.ofEntries(
 *                     Map.entry("config1", "value1"),
 *                     Map.entry("config2", "value2")
 *                 ))
 *                 .sparkEnvironmentVariables(Map.ofEntries(
 *                     Map.entry("envVar1", "value1"),
 *                     Map.entry("envVar2", "value2")
 *                 ))
 *                 .initScripts(
 *                     "init.sh",
 *                     "init2.sh")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example
 *       location: East US
 *   #Create a Linked Service using managed identity and new cluster config
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: TestDtaFactory92783401247
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *       identity:
 *         type: SystemAssigned
 *   #Create a databricks instance
 *   exampleWorkspace:
 *     type: azure:databricks:Workspace
 *     name: example
 *     properties:
 *       name: databricks-test
 *       resourceGroupName: ${example.name}
 *       location: ${example.location}
 *       sku: standard
 *   msiLinked:
 *     type: azure:datafactory:LinkedServiceAzureDatabricks
 *     name: msi_linked
 *     properties:
 *       name: ADBLinkedServiceViaMSI
 *       dataFactoryId: ${exampleFactory.id}
 *       description: ADB Linked Service via MSI
 *       adbDomain: https://${exampleWorkspace.workspaceUrl}
 *       msiWorkSpaceResourceId: ${exampleWorkspace.id}
 *       newClusterConfig:
 *         nodeType: Standard_NC12
 *         clusterVersion: 5.5.x-gpu-scala2.11
 *         minNumberOfWorkers: 1
 *         maxNumberOfWorkers: 5
 *         driverNodeType: Standard_NC12
 *         logDestination: dbfs:/logs
 *         customTags:
 *           custom_tag1: sct_value_1
 *           custom_tag2: sct_value_2
 *         sparkConfig:
 *           config1: value1
 *           config2: value2
 *         sparkEnvironmentVariables:
 *           envVar1: value1
 *           envVar2: value2
 *         initScripts:
 *           - init.sh
 *           - init2.sh
 * ```
 * 
 * ### With Access Token & Existing Cluster
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example",
 *     location: "East US",
 * });
 * //Link to an existing cluster via access token
 * const exampleFactory = new azure.datafactory.Factory("example", {
 *     name: "TestDtaFactory92783401247",
 *     location: example.location,
 *     resourceGroupName: example.name,
 * });
 * //Create a databricks instance
 * const exampleWorkspace = new azure.databricks.Workspace("example", {
 *     name: "databricks-test",
 *     resourceGroupName: example.name,
 *     location: example.location,
 *     sku: "standard",
 * });
 * const atLinked = new azure.datafactory.LinkedServiceAzureDatabricks("at_linked", {
 *     name: "ADBLinkedServiceViaAccessToken",
 *     dataFactoryId: exampleFactory.id,
 *     description: "ADB Linked Service via Access Token",
 *     existingClusterId: "0308-201146-sly615",
 *     accessToken: "SomeDatabricksAccessToken",
 *     adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example = azure.core.ResourceGroup("example",
 *     name="example",
 *     location="East US")
 * #Link to an existing cluster via access token
 * example_factory = azure.datafactory.Factory("example",
 *     name="TestDtaFactory92783401247",
 *     location=example.location,
 *     resource_group_name=example.name)
 * #Create a databricks instance
 * example_workspace = azure.databricks.Workspace("example",
 *     name="databricks-test",
 *     resource_group_name=example.name,
 *     location=example.location,
 *     sku="standard")
 * at_linked = azure.datafactory.LinkedServiceAzureDatabricks("at_linked",
 *     name="ADBLinkedServiceViaAccessToken",
 *     data_factory_id=example_factory.id,
 *     description="ADB Linked Service via Access Token",
 *     existing_cluster_id="0308-201146-sly615",
 *     access_token="SomeDatabricksAccessToken",
 *     adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "example",
 *         Location = "East US",
 *     });
 *     //Link to an existing cluster via access token
 *     var exampleFactory = new Azure.DataFactory.Factory("example", new()
 *     {
 *         Name = "TestDtaFactory92783401247",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *     });
 *     //Create a databricks instance
 *     var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
 *     {
 *         Name = "databricks-test",
 *         ResourceGroupName = example.Name,
 *         Location = example.Location,
 *         Sku = "standard",
 *     });
 *     var atLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("at_linked", new()
 *     {
 *         Name = "ADBLinkedServiceViaAccessToken",
 *         DataFactoryId = exampleFactory.Id,
 *         Description = "ADB Linked Service via Access Token",
 *         ExistingClusterId = "0308-201146-sly615",
 *         AccessToken = "SomeDatabricksAccessToken",
 *         AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/databricks"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("example"),
 * 			Location: pulumi.String("East US"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		// Link to an existing cluster via access token
 * 		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
 * 			Name:              pulumi.String("TestDtaFactory92783401247"),
 * 			Location:          example.Location,
 * 			ResourceGroupName: example.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		// Create a databricks instance
 * 		exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
 * 			Name:              pulumi.String("databricks-test"),
 * 			ResourceGroupName: example.Name,
 * 			Location:          example.Location,
 * 			Sku:               pulumi.String("standard"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "at_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
 * 			Name:              pulumi.String("ADBLinkedServiceViaAccessToken"),
 * 			DataFactoryId:     exampleFactory.ID(),
 * 			Description:       pulumi.String("ADB Linked Service via Access Token"),
 * 			ExistingClusterId: pulumi.String("0308-201146-sly615"),
 * 			AccessToken:       pulumi.String("SomeDatabricksAccessToken"),
 * 			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
 * 				return fmt.Sprintf("https://%v", workspaceUrl), nil
 * 			}).(pulumi.StringOutput),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.databricks.Workspace;
 * import com.pulumi.azure.databricks.WorkspaceArgs;
 * import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
 * import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example")
 *             .location("East US")
 *             .build());
 *         //Link to an existing cluster via access token
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("TestDtaFactory92783401247")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .build());
 *         //Create a databricks instance
 *         var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
 *             .name("databricks-test")
 *             .resourceGroupName(example.name())
 *             .location(example.location())
 *             .sku("standard")
 *             .build());
 *         var atLinked = new LinkedServiceAzureDatabricks("atLinked", LinkedServiceAzureDatabricksArgs.builder()
 *             .name("ADBLinkedServiceViaAccessToken")
 *             .dataFactoryId(exampleFactory.id())
 *             .description("ADB Linked Service via Access Token")
 *             .existingClusterId("0308-201146-sly615")
 *             .accessToken("SomeDatabricksAccessToken")
 *             .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example
 *       location: East US
 *   #Link to an existing cluster via access token
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: TestDtaFactory92783401247
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *   #Create a databricks instance
 *   exampleWorkspace:
 *     type: azure:databricks:Workspace
 *     name: example
 *     properties:
 *       name: databricks-test
 *       resourceGroupName: ${example.name}
 *       location: ${example.location}
 *       sku: standard
 *   atLinked:
 *     type: azure:datafactory:LinkedServiceAzureDatabricks
 *     name: at_linked
 *     properties:
 *       name: ADBLinkedServiceViaAccessToken
 *       dataFactoryId: ${exampleFactory.id}
 *       description: ADB Linked Service via Access Token
 *       existingClusterId: 0308-201146-sly615
 *       accessToken: SomeDatabricksAccessToken
 *       adbDomain: https://${exampleWorkspace.workspaceUrl}
 * ```
 * 
 * ## Import
 * Data Factory Linked Services can be imported using the `resource id`, e.g.
 * ```sh
 * $ pulumi import azure:datafactory/linkedServiceAzureDatabricks:LinkedServiceAzureDatabricks example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example
 * ```
 * @property accessToken Authenticate to ADB via an access token.
 * @property adbDomain The domain URL of the databricks instance.
 * @property additionalProperties A map of additional properties to associate with the Data Factory Linked Service.
 * @property annotations List of tags that can be used for describing the Data Factory Linked Service.
 * @property dataFactoryId The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
 * @property description The description for the Data Factory Linked Service.
 * @property existingClusterId The cluster_id of an existing cluster within the linked ADB instance.
 * @property instancePool Leverages an instance pool within the linked ADB instance as one `instance_pool` block defined below.
 * @property integrationRuntimeName The integration runtime reference to associate with the Data Factory Linked Service.
 * @property keyVaultPassword Authenticate to ADB via Azure Key Vault Linked Service as defined in the `key_vault_password` block below.
 * @property msiWorkSpaceResourceId Authenticate to ADB via managed service identity.
 * @property name Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
 * @property newClusterConfig Creates new clusters within the linked ADB instance as defined in the `new_cluster_config` block below.
 * @property parameters A map of parameters to associate with the Data Factory Linked Service.
 */
public data class LinkedServiceAzureDatabricksArgs(
    public val accessToken: Output<String>? = null,
    public val adbDomain: Output<String>? = null,
    public val additionalProperties: Output<Map<String, String>>? = null,
    public val annotations: Output<List<String>>? = null,
    public val dataFactoryId: Output<String>? = null,
    public val description: Output<String>? = null,
    public val existingClusterId: Output<String>? = null,
    public val instancePool: Output<LinkedServiceAzureDatabricksInstancePoolArgs>? = null,
    public val integrationRuntimeName: Output<String>? = null,
    public val keyVaultPassword: Output<LinkedServiceAzureDatabricksKeyVaultPasswordArgs>? = null,
    public val msiWorkSpaceResourceId: Output<String>? = null,
    public val name: Output<String>? = null,
    public val newClusterConfig: Output<LinkedServiceAzureDatabricksNewClusterConfigArgs>? = null,
    public val parameters: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs> {
    /**
     * Converts this Kotlin-idiomatic args object into the generated Java
     * `LinkedServiceAzureDatabricksArgs` the underlying Pulumi provider expects.
     * Null properties are left unset on the Java builder; nested Kotlin args
     * types are recursively converted via their own `toJava()`.
     */
    override fun toJava(): com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs =
        com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs.builder()
            .accessToken(accessToken?.applyValue({ args0 -> args0 }))
            .adbDomain(adbDomain?.applyValue({ args0 -> args0 }))
            .additionalProperties(
                additionalProperties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .dataFactoryId(dataFactoryId?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .existingClusterId(existingClusterId?.applyValue({ args0 -> args0 }))
            .instancePool(instancePool?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .integrationRuntimeName(integrationRuntimeName?.applyValue({ args0 -> args0 }))
            .keyVaultPassword(keyVaultPassword?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .msiWorkSpaceResourceId(msiWorkSpaceResourceId?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .newClusterConfig(newClusterConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .parameters(
                parameters?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}

/**
 * Builder for [LinkedServiceAzureDatabricksArgs].
 *
 * Collects each argument as a nullable [Output] so that values may be supplied either as
 * plain values, as already-lifted [Output]s, or (for nested args) via nested builder lambdas.
 * The random-looking `@JvmName` values are generated disambiguators for the suspend
 * overloads, which would otherwise erase to identical JVM signatures — they must be kept.
 */
@PulumiTagMarker
public class LinkedServiceAzureDatabricksArgsBuilder internal constructor() {
    private var accessToken: Output<String>? = null

    private var adbDomain: Output<String>? = null

    private var additionalProperties: Output<Map<String, String>>? = null

    private var annotations: Output<List<String>>? = null

    private var dataFactoryId: Output<String>? = null

    private var description: Output<String>? = null

    private var existingClusterId: Output<String>? = null

    private var instancePool: Output<LinkedServiceAzureDatabricksInstancePoolArgs>? = null

    private var integrationRuntimeName: Output<String>? = null

    private var keyVaultPassword: Output<LinkedServiceAzureDatabricksKeyVaultPasswordArgs>? = null

    private var msiWorkSpaceResourceId: Output<String>? = null

    private var name: Output<String>? = null

    private var newClusterConfig: Output<LinkedServiceAzureDatabricksNewClusterConfigArgs>? = null

    private var parameters: Output<Map<String, String>>? = null

    /**
     * @param value Authenticate to ADB via an access token.
     */
    @JvmName("ntqdfoyhhumdbjbp")
    public suspend fun accessToken(`value`: Output<String>) {
        this.accessToken = value
    }

    /**
     * @param value The domain URL of the databricks instance.
     */
    @JvmName("rdodjgeiaretfjsh")
    public suspend fun adbDomain(`value`: Output<String>) {
        this.adbDomain = value
    }

    /**
     * @param value A map of additional properties to associate with the Data Factory Linked Service.
     */
    @JvmName("facgdtxinkayfvfh")
    public suspend fun additionalProperties(`value`: Output<Map<String, String>>) {
        this.additionalProperties = value
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Linked Service.
     */
    @JvmName("wluwkjjhkmjfjlmn")
    public suspend fun annotations(`value`: Output<List<String>>) {
        this.annotations = value
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Linked Service.
     */
    @JvmName("mqdlahsonkmrcdhk")
    public suspend fun annotations(vararg values: Output<String>) {
        // Output.all lifts a list of Outputs into a single Output of a list.
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Linked Service.
     */
    @JvmName("wnubpfhfboylvrwr")
    public suspend fun annotations(values: List<Output<String>>) {
        this.annotations = Output.all(values)
    }

    /**
     * @param value The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
     */
    @JvmName("reydjrlvnsylmdxh")
    public suspend fun dataFactoryId(`value`: Output<String>) {
        this.dataFactoryId = value
    }

    /**
     * @param value The description for the Data Factory Linked Service.
     */
    @JvmName("jgxfeeleatxwupxi")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value The cluster_id of an existing cluster within the linked ADB instance.
     */
    @JvmName("yaulxtjtnavvalvr")
    public suspend fun existingClusterId(`value`: Output<String>) {
        this.existingClusterId = value
    }

    /**
     * @param value Leverages an instance pool within the linked ADB instance as one `instance_pool` block defined below.
     */
    @JvmName("sghsqudgudkvfwto")
    public suspend fun instancePool(`value`: Output<LinkedServiceAzureDatabricksInstancePoolArgs>) {
        this.instancePool = value
    }

    /**
     * @param value The integration runtime reference to associate with the Data Factory Linked Service.
     */
    @JvmName("ueegqtxuiujwioxc")
    public suspend fun integrationRuntimeName(`value`: Output<String>) {
        this.integrationRuntimeName = value
    }

    /**
     * @param value Authenticate to ADB via Azure Key Vault Linked Service as defined in the `key_vault_password` block below.
     */
    @JvmName("hyxfmovvbumudvap")
    public suspend fun keyVaultPassword(`value`: Output<LinkedServiceAzureDatabricksKeyVaultPasswordArgs>) {
        this.keyVaultPassword = value
    }

    /**
     * @param value Authenticate to ADB via managed service identity.
     */
    @JvmName("ofjpsxbfjhrxeuvn")
    public suspend fun msiWorkSpaceResourceId(`value`: Output<String>) {
        this.msiWorkSpaceResourceId = value
    }

    /**
     * @param value Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("nafhmnixhbpatiwu")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value Creates new clusters within the linked ADB instance as defined in the `new_cluster_config` block below.
     */
    @JvmName("rjfvlepnjvtgjduy")
    public suspend fun newClusterConfig(`value`: Output<LinkedServiceAzureDatabricksNewClusterConfigArgs>) {
        this.newClusterConfig = value
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Linked Service.
     */
    @JvmName("boewvkinrvjdrdaw")
    public suspend fun parameters(`value`: Output<Map<String, String>>) {
        this.parameters = value
    }

    /**
     * @param value Authenticate to ADB via an access token.
     */
    @JvmName("yhnertqcfwwgnpws")
    public suspend fun accessToken(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.accessToken = mapped
    }

    /**
     * @param value The domain URL of the databricks instance.
     */
    @JvmName("kvanuqobfibwtort")
    public suspend fun adbDomain(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.adbDomain = mapped
    }

    /**
     * @param value A map of additional properties to associate with the Data Factory Linked Service.
     */
    @JvmName("ltncmeibrohesvei")
    public suspend fun additionalProperties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param values A map of additional properties to associate with the Data Factory Linked Service.
     */
    @JvmName("beurahxwbjsguupr")
    public fun additionalProperties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Linked Service.
     */
    @JvmName("blfrfikixotybxpd")
    public suspend fun annotations(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Linked Service.
     */
    @JvmName("gogahdavpexogues")
    public suspend fun annotations(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
     */
    @JvmName("lsynkcounoddfjrv")
    public suspend fun dataFactoryId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataFactoryId = mapped
    }

    /**
     * @param value The description for the Data Factory Linked Service.
     */
    @JvmName("gjdmdeihljcmtjsp")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value The cluster_id of an existing cluster within the linked ADB instance.
     */
    @JvmName("wriescexvmgrjdvk")
    public suspend fun existingClusterId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.existingClusterId = mapped
    }

    /**
     * @param value Leverages an instance pool within the linked ADB instance as one `instance_pool` block defined below.
     */
    @JvmName("fwikjpaabmfvdkws")
    public suspend fun instancePool(`value`: LinkedServiceAzureDatabricksInstancePoolArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.instancePool = mapped
    }

    /**
     * @param argument Leverages an instance pool within the linked ADB instance as one `instance_pool` block defined below.
     */
    @JvmName("owxrfhphryglwdpm")
    public suspend fun instancePool(argument: suspend LinkedServiceAzureDatabricksInstancePoolArgsBuilder.() -> Unit) {
        val toBeMapped = LinkedServiceAzureDatabricksInstancePoolArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.instancePool = mapped
    }

    /**
     * @param value The integration runtime reference to associate with the Data Factory Linked Service.
     */
    @JvmName("tlqxuwbcrxrbgvbe")
    public suspend fun integrationRuntimeName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.integrationRuntimeName = mapped
    }

    /**
     * @param value Authenticate to ADB via Azure Key Vault Linked Service as defined in the `key_vault_password` block below.
     */
    @JvmName("xnoyeumfielrrfgu")
    public suspend fun keyVaultPassword(`value`: LinkedServiceAzureDatabricksKeyVaultPasswordArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.keyVaultPassword = mapped
    }

    /**
     * @param argument Authenticate to ADB via Azure Key Vault Linked Service as defined in the `key_vault_password` block below.
     */
    @JvmName("aoghwsxuhpkqnhkq")
    public suspend fun keyVaultPassword(argument: suspend LinkedServiceAzureDatabricksKeyVaultPasswordArgsBuilder.() -> Unit) {
        val toBeMapped = LinkedServiceAzureDatabricksKeyVaultPasswordArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.keyVaultPassword = mapped
    }

    /**
     * @param value Authenticate to ADB via managed service identity.
     */
    @JvmName("yxtpgipynnfggbws")
    public suspend fun msiWorkSpaceResourceId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.msiWorkSpaceResourceId = mapped
    }

    /**
     * @param value Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("eltkirurviocduss")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value Creates new clusters within the linked ADB instance as defined in the `new_cluster_config` block below.
     */
    @JvmName("wnownfsebotrkxro")
    public suspend fun newClusterConfig(`value`: LinkedServiceAzureDatabricksNewClusterConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterConfig = mapped
    }

    /**
     * @param argument Creates new clusters within the linked ADB instance as defined in the `new_cluster_config` block below.
     */
    @JvmName("ginpushpsxrvyxxq")
    public suspend fun newClusterConfig(argument: suspend LinkedServiceAzureDatabricksNewClusterConfigArgsBuilder.() -> Unit) {
        val toBeMapped = LinkedServiceAzureDatabricksNewClusterConfigArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.newClusterConfig = mapped
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Linked Service.
     */
    @JvmName("rvqhiyopvoqyiajw")
    public suspend fun parameters(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param values A map of parameters to associate with the Data Factory Linked Service.
     */
    @JvmName("jbgxpkvspevpdafm")
    public fun parameters(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    // Assembles the immutable args object from whatever setters were invoked.
    internal fun build(): LinkedServiceAzureDatabricksArgs = LinkedServiceAzureDatabricksArgs(
        accessToken = accessToken,
        adbDomain = adbDomain,
        additionalProperties = additionalProperties,
        annotations = annotations,
        dataFactoryId = dataFactoryId,
        description = description,
        existingClusterId = existingClusterId,
        instancePool = instancePool,
        integrationRuntimeName = integrationRuntimeName,
        keyVaultPassword = keyVaultPassword,
        msiWorkSpaceResourceId = msiWorkSpaceResourceId,
        name = name,
        newClusterConfig = newClusterConfig,
        parameters = parameters,
    )
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy