com.pulumi.azurenative.datafactory.outputs.AzureDatabricksLinkedServiceResponse

// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azurenative.datafactory.outputs;

import com.pulumi.azurenative.datafactory.outputs.AzureKeyVaultSecretReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.CredentialReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.IntegrationRuntimeReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.ParameterSpecificationResponse;
import com.pulumi.azurenative.datafactory.outputs.SecureStringResponse;
import com.pulumi.core.Either;
import com.pulumi.core.annotations.CustomType;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Object;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;

@CustomType
public final class AzureDatabricksLinkedServiceResponse {
    /**
     * @return Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Either<SecureStringResponse,AzureKeyVaultSecretReferenceResponse> accessToken;
    /**
     * @return List of tags that can be used for describing the linked service.
     * 
     */
    private @Nullable List<Object> annotations;
    /**
     * @return Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object authentication;
    /**
     * @return The integration runtime reference.
     * 
     */
    private @Nullable IntegrationRuntimeReferenceResponse connectVia;
    /**
     * @return The credential reference containing authentication information.
     * 
     */
    private @Nullable CredentialReferenceResponse credential;
    /**
     * @return Linked service description.
     * 
     */
    private @Nullable String description;
    /**
     * @return &lt;REGION&gt;.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string).
     * 
     */
    private Object domain;
    /**
     * @return The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
     * 
     */
    private @Nullable String encryptedCredential;
    /**
     * @return The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object existingClusterId;
    /**
     * @return The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object instancePoolId;
    /**
     * @return Additional tags for cluster resources. This property is ignored in instance pool configurations.
     * 
     */
    private @Nullable Map<String,Object> newClusterCustomTags;
    /**
     * @return The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object newClusterDriverNodeType;
    /**
     * @return Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean).
     * 
     */
    private @Nullable Object newClusterEnableElasticDisk;
    /**
     * @return User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings).
     * 
     */
    private @Nullable Object newClusterInitScripts;
    /**
     * @return Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object newClusterLogDestination;
    /**
     * @return The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object newClusterNodeType;
    /**
     * @return If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object newClusterNumOfWorker;
    /**
     * @return A set of optional, user-specified Spark configuration key-value pairs.
     * 
     */
    private @Nullable Map<String,Object> newClusterSparkConf;
    /**
     * @return A set of optional, user-specified Spark environment variables key-value pairs.
     * 
     */
    private @Nullable Map<String,Object> newClusterSparkEnvVars;
    /**
     * @return If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object newClusterVersion;
    /**
     * @return Parameters for linked service.
     * 
     */
    private @Nullable Map<String,ParameterSpecificationResponse> parameters;
    /**
     * @return The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object policyId;
    /**
     * @return Type of linked service.
     * Expected value is 'AzureDatabricks'.
     * 
     */
    private String type;
    /**
     * @return Version of the linked service.
     * 
     */
    private @Nullable String version;
    /**
     * @return Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object workspaceResourceId;

    private AzureDatabricksLinkedServiceResponse() {}
    /**
     * @return Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Either<SecureStringResponse,AzureKeyVaultSecretReferenceResponse>> accessToken() {
        return Optional.ofNullable(this.accessToken);
    }
    /**
     * @return List of tags that can be used for describing the linked service.
     * 
     */
    public List<Object> annotations() {
        return this.annotations == null ? List.of() : this.annotations;
    }
    /**
     * @return Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> authentication() {
        return Optional.ofNullable(this.authentication);
    }
    /**
     * @return The integration runtime reference.
     * 
     */
    public Optional<IntegrationRuntimeReferenceResponse> connectVia() {
        return Optional.ofNullable(this.connectVia);
    }
    /**
     * @return The credential reference containing authentication information.
     * 
     */
    public Optional<CredentialReferenceResponse> credential() {
        return Optional.ofNullable(this.credential);
    }
    /**
     * @return Linked service description.
     * 
     */
    public Optional<String> description() {
        return Optional.ofNullable(this.description);
    }
    /**
     * @return &lt;REGION&gt;.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string).
     * 
     */
    public Object domain() {
        return this.domain;
    }
    /**
     * @return The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
     * 
     */
    public Optional<String> encryptedCredential() {
        return Optional.ofNullable(this.encryptedCredential);
    }
    /**
     * @return The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> existingClusterId() {
        return Optional.ofNullable(this.existingClusterId);
    }
    /**
     * @return The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> instancePoolId() {
        return Optional.ofNullable(this.instancePoolId);
    }
    /**
     * @return Additional tags for cluster resources. This property is ignored in instance pool configurations.
     * 
     */
    public Map<String,Object> newClusterCustomTags() {
        return this.newClusterCustomTags == null ? Map.of() : this.newClusterCustomTags;
    }
    /**
     * @return The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> newClusterDriverNodeType() {
        return Optional.ofNullable(this.newClusterDriverNodeType);
    }
    /**
     * @return Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean).
     * 
     */
    public Optional<Object> newClusterEnableElasticDisk() {
        return Optional.ofNullable(this.newClusterEnableElasticDisk);
    }
    /**
     * @return User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings).
     * 
     */
    public Optional<Object> newClusterInitScripts() {
        return Optional.ofNullable(this.newClusterInitScripts);
    }
    /**
     * @return Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> newClusterLogDestination() {
        return Optional.ofNullable(this.newClusterLogDestination);
    }
    /**
     * @return The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> newClusterNodeType() {
        return Optional.ofNullable(this.newClusterNodeType);
    }
    /**
     * @return If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> newClusterNumOfWorker() {
        return Optional.ofNullable(this.newClusterNumOfWorker);
    }
    /**
     * @return A set of optional, user-specified Spark configuration key-value pairs.
     * 
     */
    public Map<String,Object> newClusterSparkConf() {
        return this.newClusterSparkConf == null ? Map.of() : this.newClusterSparkConf;
    }
    /**
     * @return A set of optional, user-specified Spark environment variables key-value pairs.
     * 
     */
    public Map<String,Object> newClusterSparkEnvVars() {
        return this.newClusterSparkEnvVars == null ? Map.of() : this.newClusterSparkEnvVars;
    }
    /**
     * @return If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> newClusterVersion() {
        return Optional.ofNullable(this.newClusterVersion);
    }
    /**
     * @return Parameters for linked service.
     * 
     */
    public Map<String,ParameterSpecificationResponse> parameters() {
        return this.parameters == null ? Map.of() : this.parameters;
    }
    /**
     * @return The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> policyId() {
        return Optional.ofNullable(this.policyId);
    }
    /**
     * @return Type of linked service.
     * Expected value is 'AzureDatabricks'.
     * 
     */
    public String type() {
        return this.type;
    }
    /**
     * @return Version of the linked service.
     * 
     */
    public Optional<String> version() {
        return Optional.ofNullable(this.version);
    }
    /**
     * @return Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> workspaceResourceId() {
        return Optional.ofNullable(this.workspaceResourceId);
    }

    public static Builder builder() {
        return new Builder();
    }

    public static Builder builder(AzureDatabricksLinkedServiceResponse defaults) {
        return new Builder(defaults);
    }
    @CustomType.Builder
    public static final class Builder {
        private @Nullable Either<SecureStringResponse,AzureKeyVaultSecretReferenceResponse> accessToken;
        private @Nullable List<Object> annotations;
        private @Nullable Object authentication;
        private @Nullable IntegrationRuntimeReferenceResponse connectVia;
        private @Nullable CredentialReferenceResponse credential;
        private @Nullable String description;
        private Object domain;
        private @Nullable String encryptedCredential;
        private @Nullable Object existingClusterId;
        private @Nullable Object instancePoolId;
        private @Nullable Map<String,Object> newClusterCustomTags;
        private @Nullable Object newClusterDriverNodeType;
        private @Nullable Object newClusterEnableElasticDisk;
        private @Nullable Object newClusterInitScripts;
        private @Nullable Object newClusterLogDestination;
        private @Nullable Object newClusterNodeType;
        private @Nullable Object newClusterNumOfWorker;
        private @Nullable Map<String,Object> newClusterSparkConf;
        private @Nullable Map<String,Object> newClusterSparkEnvVars;
        private @Nullable Object newClusterVersion;
        private @Nullable Map<String,ParameterSpecificationResponse> parameters;
        private @Nullable Object policyId;
        private String type;
        private @Nullable String version;
        private @Nullable Object workspaceResourceId;
        public Builder() {}
        public Builder(AzureDatabricksLinkedServiceResponse defaults) {
    	      Objects.requireNonNull(defaults);
    	      this.accessToken = defaults.accessToken;
    	      this.annotations = defaults.annotations;
    	      this.authentication = defaults.authentication;
    	      this.connectVia = defaults.connectVia;
    	      this.credential = defaults.credential;
    	      this.description = defaults.description;
    	      this.domain = defaults.domain;
    	      this.encryptedCredential = defaults.encryptedCredential;
    	      this.existingClusterId = defaults.existingClusterId;
    	      this.instancePoolId = defaults.instancePoolId;
    	      this.newClusterCustomTags = defaults.newClusterCustomTags;
    	      this.newClusterDriverNodeType = defaults.newClusterDriverNodeType;
    	      this.newClusterEnableElasticDisk = defaults.newClusterEnableElasticDisk;
    	      this.newClusterInitScripts = defaults.newClusterInitScripts;
    	      this.newClusterLogDestination = defaults.newClusterLogDestination;
    	      this.newClusterNodeType = defaults.newClusterNodeType;
    	      this.newClusterNumOfWorker = defaults.newClusterNumOfWorker;
    	      this.newClusterSparkConf = defaults.newClusterSparkConf;
    	      this.newClusterSparkEnvVars = defaults.newClusterSparkEnvVars;
    	      this.newClusterVersion = defaults.newClusterVersion;
    	      this.parameters = defaults.parameters;
    	      this.policyId = defaults.policyId;
    	      this.type = defaults.type;
    	      this.version = defaults.version;
    	      this.workspaceResourceId = defaults.workspaceResourceId;
        }

        @CustomType.Setter
        public Builder accessToken(@Nullable Either<SecureStringResponse,AzureKeyVaultSecretReferenceResponse> accessToken) {

            this.accessToken = accessToken;
            return this;
        }
        @CustomType.Setter
        public Builder annotations(@Nullable List<Object> annotations) {

            this.annotations = annotations;
            return this;
        }
        public Builder annotations(Object... annotations) {
            return annotations(List.of(annotations));
        }
        @CustomType.Setter
        public Builder authentication(@Nullable Object authentication) {

            this.authentication = authentication;
            return this;
        }
        @CustomType.Setter
        public Builder connectVia(@Nullable IntegrationRuntimeReferenceResponse connectVia) {

            this.connectVia = connectVia;
            return this;
        }
        @CustomType.Setter
        public Builder credential(@Nullable CredentialReferenceResponse credential) {

            this.credential = credential;
            return this;
        }
        @CustomType.Setter
        public Builder description(@Nullable String description) {

            this.description = description;
            return this;
        }
        @CustomType.Setter
        public Builder domain(Object domain) {
            if (domain == null) {
              throw new MissingRequiredPropertyException("AzureDatabricksLinkedServiceResponse", "domain");
            }
            this.domain = domain;
            return this;
        }
        @CustomType.Setter
        public Builder encryptedCredential(@Nullable String encryptedCredential) {

            this.encryptedCredential = encryptedCredential;
            return this;
        }
        @CustomType.Setter
        public Builder existingClusterId(@Nullable Object existingClusterId) {

            this.existingClusterId = existingClusterId;
            return this;
        }
        @CustomType.Setter
        public Builder instancePoolId(@Nullable Object instancePoolId) {

            this.instancePoolId = instancePoolId;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterCustomTags(@Nullable Map<String,Object> newClusterCustomTags) {

            this.newClusterCustomTags = newClusterCustomTags;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterDriverNodeType(@Nullable Object newClusterDriverNodeType) {

            this.newClusterDriverNodeType = newClusterDriverNodeType;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterEnableElasticDisk(@Nullable Object newClusterEnableElasticDisk) {

            this.newClusterEnableElasticDisk = newClusterEnableElasticDisk;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterInitScripts(@Nullable Object newClusterInitScripts) {

            this.newClusterInitScripts = newClusterInitScripts;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterLogDestination(@Nullable Object newClusterLogDestination) {

            this.newClusterLogDestination = newClusterLogDestination;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterNodeType(@Nullable Object newClusterNodeType) {

            this.newClusterNodeType = newClusterNodeType;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterNumOfWorker(@Nullable Object newClusterNumOfWorker) {

            this.newClusterNumOfWorker = newClusterNumOfWorker;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterSparkConf(@Nullable Map<String,Object> newClusterSparkConf) {

            this.newClusterSparkConf = newClusterSparkConf;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterSparkEnvVars(@Nullable Map<String,Object> newClusterSparkEnvVars) {

            this.newClusterSparkEnvVars = newClusterSparkEnvVars;
            return this;
        }
        @CustomType.Setter
        public Builder newClusterVersion(@Nullable Object newClusterVersion) {

            this.newClusterVersion = newClusterVersion;
            return this;
        }
        @CustomType.Setter
        public Builder parameters(@Nullable Map<String,ParameterSpecificationResponse> parameters) {

            this.parameters = parameters;
            return this;
        }
        @CustomType.Setter
        public Builder policyId(@Nullable Object policyId) {

            this.policyId = policyId;
            return this;
        }
        @CustomType.Setter
        public Builder type(String type) {
            if (type == null) {
              throw new MissingRequiredPropertyException("AzureDatabricksLinkedServiceResponse", "type");
            }
            this.type = type;
            return this;
        }
        @CustomType.Setter
        public Builder version(@Nullable String version) {

            this.version = version;
            return this;
        }
        @CustomType.Setter
        public Builder workspaceResourceId(@Nullable Object workspaceResourceId) {

            this.workspaceResourceId = workspaceResourceId;
            return this;
        }
        public AzureDatabricksLinkedServiceResponse build() {
            final var _resultValue = new AzureDatabricksLinkedServiceResponse();
            _resultValue.accessToken = accessToken;
            _resultValue.annotations = annotations;
            _resultValue.authentication = authentication;
            _resultValue.connectVia = connectVia;
            _resultValue.credential = credential;
            _resultValue.description = description;
            _resultValue.domain = domain;
            _resultValue.encryptedCredential = encryptedCredential;
            _resultValue.existingClusterId = existingClusterId;
            _resultValue.instancePoolId = instancePoolId;
            _resultValue.newClusterCustomTags = newClusterCustomTags;
            _resultValue.newClusterDriverNodeType = newClusterDriverNodeType;
            _resultValue.newClusterEnableElasticDisk = newClusterEnableElasticDisk;
            _resultValue.newClusterInitScripts = newClusterInitScripts;
            _resultValue.newClusterLogDestination = newClusterLogDestination;
            _resultValue.newClusterNodeType = newClusterNodeType;
            _resultValue.newClusterNumOfWorker = newClusterNumOfWorker;
            _resultValue.newClusterSparkConf = newClusterSparkConf;
            _resultValue.newClusterSparkEnvVars = newClusterSparkEnvVars;
            _resultValue.newClusterVersion = newClusterVersion;
            _resultValue.parameters = parameters;
            _resultValue.policyId = policyId;
            _resultValue.type = type;
            _resultValue.version = version;
            _resultValue.workspaceResourceId = workspaceResourceId;
            return _resultValue;
        }
    }
}
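
For orientation, a minimal usage sketch of this output type follows, assuming a hypothetical workspace URL and cluster id; in a real Pulumi program instances are normally produced by the provider, but the generated builder can also be driven directly, for example in tests. Note that the domain(...) and type(...) setters reject null with MissingRequiredPropertyException, while every other property is optional.

// Minimal sketch (assumed values): constructing and reading the response via its builder.
import com.pulumi.azurenative.datafactory.outputs.AzureDatabricksLinkedServiceResponse;

import java.util.Map;

public class AzureDatabricksLinkedServiceResponseExample {
    public static void main(String[] args) {
        AzureDatabricksLinkedServiceResponse response = AzureDatabricksLinkedServiceResponse.builder()
                // domain and type are required; passing null throws MissingRequiredPropertyException.
                .type("AzureDatabricks")
                .domain("https://adb-1234567890123456.7.azuredatabricks.net") // hypothetical deployment domain
                .existingClusterId("0123-456789-abcdef12")                    // hypothetical interactive cluster id
                .newClusterSparkConf(Map.<String,Object>of("spark.speculation", "true"))
                .build();

        // Required properties come back directly; optional ones are wrapped in Optional,
        // and unset collections default to empty List/Map instances.
        System.out.println(response.type());                           // AzureDatabricks
        System.out.println(response.existingClusterId().orElse("n/a"));
        System.out.println(response.newClusterCustomTags());           // {}
    }
}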