// Source: Maven Central artifact
// com.pulumi.azurenative.datafactory.outputs.SynapseSparkJobDefinitionActivityResponse
// (a newer version, 2.72.0, is available upstream)
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azurenative.datafactory.outputs;

import com.pulumi.azurenative.datafactory.outputs.ActivityDependencyResponse;
import com.pulumi.azurenative.datafactory.outputs.ActivityPolicyResponse;
import com.pulumi.azurenative.datafactory.outputs.BigDataPoolParametrizationReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.LinkedServiceReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.SparkConfigurationParametrizationReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.SynapseSparkJobReferenceResponse;
import com.pulumi.azurenative.datafactory.outputs.UserPropertyResponse;
import com.pulumi.core.annotations.CustomType;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Object;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;

@CustomType
public final class SynapseSparkJobDefinitionActivityResponse {
    /**
     * @return User specified arguments to SynapseSparkJobDefinitionActivity.
     * 
     */
    private @Nullable List<Object> arguments;
    /**
     * @return The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object className;
    /**
     * @return Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
     * 
     */
    private @Nullable Object conf;
    /**
     * @return The type of the spark config.
     * 
     */
    private @Nullable String configurationType;
    /**
     * @return Activity depends on condition.
     * 
     */
    private @Nullable List<ActivityDependencyResponse> dependsOn;
    /**
     * @return Activity description.
     * 
     */
    private @Nullable String description;
    /**
     * @return Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object driverSize;
    /**
     * @return Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object executorSize;
    /**
     * @return The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    private @Nullable Object file;
    /**
     * @return (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
     * 
     */
    private @Nullable List<Object> files;
    /**
     * @return Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
     * 
     */
    private @Nullable List<Object> filesV2;
    /**
     * @return Linked service reference.
     * 
     */
    private @Nullable LinkedServiceReferenceResponse linkedServiceName;
    /**
     * @return Activity name.
     * 
     */
    private String name;
    /**
     * @return Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
     * 
     */
    private @Nullable Object numExecutors;
    /**
     * @return Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
     * 
     */
    private @Nullable String onInactiveMarkAs;
    /**
     * @return Activity policy.
     * 
     */
    private @Nullable ActivityPolicyResponse policy;
    /**
     * @return Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
     * 
     */
    private @Nullable List<Object> pythonCodeReference;
    /**
     * @return Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
     * 
     */
    private @Nullable Object scanFolder;
    /**
     * @return Spark configuration property.
     * 
     */
    private @Nullable Map<String,Object> sparkConfig;
    /**
     * @return Synapse spark job reference.
     * 
     */
    private SynapseSparkJobReferenceResponse sparkJob;
    /**
     * @return Activity state. This is an optional property and if not provided, the state will be Active by default.
     * 
     */
    private @Nullable String state;
    /**
     * @return The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
     * 
     */
    private @Nullable BigDataPoolParametrizationReferenceResponse targetBigDataPool;
    /**
     * @return The spark configuration of the spark job.
     * 
     */
    private @Nullable SparkConfigurationParametrizationReferenceResponse targetSparkConfiguration;
    /**
     * @return Type of activity.
     * Expected value is 'SparkJob'.
     * 
     */
    private String type;
    /**
     * @return Activity user properties.
     * 
     */
    private @Nullable List<UserPropertyResponse> userProperties;

    private SynapseSparkJobDefinitionActivityResponse() {}
    /**
     * @return User specified arguments to SynapseSparkJobDefinitionActivity.
     * 
     */
    public List<Object> arguments() {
        return this.arguments == null ? List.of() : this.arguments;
    }
    /**
     * @return The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> className() {
        return Optional.ofNullable(this.className);
    }
    /**
     * @return Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
     * 
     */
    public Optional<Object> conf() {
        return Optional.ofNullable(this.conf);
    }
    /**
     * @return The type of the spark config.
     * 
     */
    public Optional<String> configurationType() {
        return Optional.ofNullable(this.configurationType);
    }
    /**
     * @return Activity depends on condition.
     * 
     */
    public List<ActivityDependencyResponse> dependsOn() {
        return this.dependsOn == null ? List.of() : this.dependsOn;
    }
    /**
     * @return Activity description.
     * 
     */
    public Optional<String> description() {
        return Optional.ofNullable(this.description);
    }
    /**
     * @return Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> driverSize() {
        return Optional.ofNullable(this.driverSize);
    }
    /**
     * @return Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> executorSize() {
        return Optional.ofNullable(this.executorSize);
    }
    /**
     * @return The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional<Object> file() {
        return Optional.ofNullable(this.file);
    }
    /**
     * @return (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
     * 
     */
    public List<Object> files() {
        return this.files == null ? List.of() : this.files;
    }
    /**
     * @return Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
     * 
     */
    public List<Object> filesV2() {
        return this.filesV2 == null ? List.of() : this.filesV2;
    }
    /**
     * @return Linked service reference.
     * 
     */
    public Optional<LinkedServiceReferenceResponse> linkedServiceName() {
        return Optional.ofNullable(this.linkedServiceName);
    }
    /**
     * @return Activity name.
     * 
     */
    public String name() {
        return this.name;
    }
    /**
     * @return Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
     * 
     */
    public Optional<Object> numExecutors() {
        return Optional.ofNullable(this.numExecutors);
    }
    /**
     * @return Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
     * 
     */
    public Optional<String> onInactiveMarkAs() {
        return Optional.ofNullable(this.onInactiveMarkAs);
    }
    /**
     * @return Activity policy.
     * 
     */
    public Optional<ActivityPolicyResponse> policy() {
        return Optional.ofNullable(this.policy);
    }
    /**
     * @return Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
     * 
     */
    public List<Object> pythonCodeReference() {
        return this.pythonCodeReference == null ? List.of() : this.pythonCodeReference;
    }
    /**
     * @return Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
     * 
     */
    public Optional<Object> scanFolder() {
        return Optional.ofNullable(this.scanFolder);
    }
    /**
     * @return Spark configuration property.
     * 
     */
    public Map<String,Object> sparkConfig() {
        return this.sparkConfig == null ? Map.of() : this.sparkConfig;
    }
    /**
     * @return Synapse spark job reference.
     * 
     */
    public SynapseSparkJobReferenceResponse sparkJob() {
        return this.sparkJob;
    }
    /**
     * @return Activity state. This is an optional property and if not provided, the state will be Active by default.
     * 
     */
    public Optional<String> state() {
        return Optional.ofNullable(this.state);
    }
    /**
     * @return The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
     * 
     */
    public Optional<BigDataPoolParametrizationReferenceResponse> targetBigDataPool() {
        return Optional.ofNullable(this.targetBigDataPool);
    }
    /**
     * @return The spark configuration of the spark job.
     * 
     */
    public Optional<SparkConfigurationParametrizationReferenceResponse> targetSparkConfiguration() {
        return Optional.ofNullable(this.targetSparkConfiguration);
    }
    /**
     * @return Type of activity.
     * Expected value is 'SparkJob'.
     * 
     */
    public String type() {
        return this.type;
    }
    /**
     * @return Activity user properties.
     * 
     */
    public List<UserPropertyResponse> userProperties() {
        return this.userProperties == null ? List.of() : this.userProperties;
    }

    public static Builder builder() {
        return new Builder();
    }

    public static Builder builder(SynapseSparkJobDefinitionActivityResponse defaults) {
        return new Builder(defaults);
    }
    @CustomType.Builder
    public static final class Builder {
        private @Nullable List<Object> arguments;
        private @Nullable Object className;
        private @Nullable Object conf;
        private @Nullable String configurationType;
        private @Nullable List<ActivityDependencyResponse> dependsOn;
        private @Nullable String description;
        private @Nullable Object driverSize;
        private @Nullable Object executorSize;
        private @Nullable Object file;
        private @Nullable List<Object> files;
        private @Nullable List<Object> filesV2;
        private @Nullable LinkedServiceReferenceResponse linkedServiceName;
        private String name;
        private @Nullable Object numExecutors;
        private @Nullable String onInactiveMarkAs;
        private @Nullable ActivityPolicyResponse policy;
        private @Nullable List<Object> pythonCodeReference;
        private @Nullable Object scanFolder;
        private @Nullable Map<String,Object> sparkConfig;
        private SynapseSparkJobReferenceResponse sparkJob;
        private @Nullable String state;
        private @Nullable BigDataPoolParametrizationReferenceResponse targetBigDataPool;
        private @Nullable SparkConfigurationParametrizationReferenceResponse targetSparkConfiguration;
        private String type;
        private @Nullable List<UserPropertyResponse> userProperties;
        public Builder() {}
        public Builder(SynapseSparkJobDefinitionActivityResponse defaults) {
    	      Objects.requireNonNull(defaults);
    	      this.arguments = defaults.arguments;
    	      this.className = defaults.className;
    	      this.conf = defaults.conf;
    	      this.configurationType = defaults.configurationType;
    	      this.dependsOn = defaults.dependsOn;
    	      this.description = defaults.description;
    	      this.driverSize = defaults.driverSize;
    	      this.executorSize = defaults.executorSize;
    	      this.file = defaults.file;
    	      this.files = defaults.files;
    	      this.filesV2 = defaults.filesV2;
    	      this.linkedServiceName = defaults.linkedServiceName;
    	      this.name = defaults.name;
    	      this.numExecutors = defaults.numExecutors;
    	      this.onInactiveMarkAs = defaults.onInactiveMarkAs;
    	      this.policy = defaults.policy;
    	      this.pythonCodeReference = defaults.pythonCodeReference;
    	      this.scanFolder = defaults.scanFolder;
    	      this.sparkConfig = defaults.sparkConfig;
    	      this.sparkJob = defaults.sparkJob;
    	      this.state = defaults.state;
    	      this.targetBigDataPool = defaults.targetBigDataPool;
    	      this.targetSparkConfiguration = defaults.targetSparkConfiguration;
    	      this.type = defaults.type;
    	      this.userProperties = defaults.userProperties;
        }

        @CustomType.Setter
        public Builder arguments(@Nullable List<Object> arguments) {

            this.arguments = arguments;
            return this;
        }
        public Builder arguments(Object... arguments) {
            return arguments(List.of(arguments));
        }
        @CustomType.Setter
        public Builder className(@Nullable Object className) {

            this.className = className;
            return this;
        }
        @CustomType.Setter
        public Builder conf(@Nullable Object conf) {

            this.conf = conf;
            return this;
        }
        @CustomType.Setter
        public Builder configurationType(@Nullable String configurationType) {

            this.configurationType = configurationType;
            return this;
        }
        @CustomType.Setter
        public Builder dependsOn(@Nullable List<ActivityDependencyResponse> dependsOn) {

            this.dependsOn = dependsOn;
            return this;
        }
        public Builder dependsOn(ActivityDependencyResponse... dependsOn) {
            return dependsOn(List.of(dependsOn));
        }
        @CustomType.Setter
        public Builder description(@Nullable String description) {

            this.description = description;
            return this;
        }
        @CustomType.Setter
        public Builder driverSize(@Nullable Object driverSize) {

            this.driverSize = driverSize;
            return this;
        }
        @CustomType.Setter
        public Builder executorSize(@Nullable Object executorSize) {

            this.executorSize = executorSize;
            return this;
        }
        @CustomType.Setter
        public Builder file(@Nullable Object file) {

            this.file = file;
            return this;
        }
        @CustomType.Setter
        public Builder files(@Nullable List<Object> files) {

            this.files = files;
            return this;
        }
        public Builder files(Object... files) {
            return files(List.of(files));
        }
        @CustomType.Setter
        public Builder filesV2(@Nullable List<Object> filesV2) {

            this.filesV2 = filesV2;
            return this;
        }
        public Builder filesV2(Object... filesV2) {
            return filesV2(List.of(filesV2));
        }
        @CustomType.Setter
        public Builder linkedServiceName(@Nullable LinkedServiceReferenceResponse linkedServiceName) {

            this.linkedServiceName = linkedServiceName;
            return this;
        }
        @CustomType.Setter
        public Builder name(String name) {
            if (name == null) {
              throw new MissingRequiredPropertyException("SynapseSparkJobDefinitionActivityResponse", "name");
            }
            this.name = name;
            return this;
        }
        @CustomType.Setter
        public Builder numExecutors(@Nullable Object numExecutors) {

            this.numExecutors = numExecutors;
            return this;
        }
        @CustomType.Setter
        public Builder onInactiveMarkAs(@Nullable String onInactiveMarkAs) {

            this.onInactiveMarkAs = onInactiveMarkAs;
            return this;
        }
        @CustomType.Setter
        public Builder policy(@Nullable ActivityPolicyResponse policy) {

            this.policy = policy;
            return this;
        }
        @CustomType.Setter
        public Builder pythonCodeReference(@Nullable List<Object> pythonCodeReference) {

            this.pythonCodeReference = pythonCodeReference;
            return this;
        }
        public Builder pythonCodeReference(Object... pythonCodeReference) {
            return pythonCodeReference(List.of(pythonCodeReference));
        }
        @CustomType.Setter
        public Builder scanFolder(@Nullable Object scanFolder) {

            this.scanFolder = scanFolder;
            return this;
        }
        @CustomType.Setter
        public Builder sparkConfig(@Nullable Map<String,Object> sparkConfig) {

            this.sparkConfig = sparkConfig;
            return this;
        }
        @CustomType.Setter
        public Builder sparkJob(SynapseSparkJobReferenceResponse sparkJob) {
            if (sparkJob == null) {
              throw new MissingRequiredPropertyException("SynapseSparkJobDefinitionActivityResponse", "sparkJob");
            }
            this.sparkJob = sparkJob;
            return this;
        }
        @CustomType.Setter
        public Builder state(@Nullable String state) {

            this.state = state;
            return this;
        }
        @CustomType.Setter
        public Builder targetBigDataPool(@Nullable BigDataPoolParametrizationReferenceResponse targetBigDataPool) {

            this.targetBigDataPool = targetBigDataPool;
            return this;
        }
        @CustomType.Setter
        public Builder targetSparkConfiguration(@Nullable SparkConfigurationParametrizationReferenceResponse targetSparkConfiguration) {

            this.targetSparkConfiguration = targetSparkConfiguration;
            return this;
        }
        @CustomType.Setter
        public Builder type(String type) {
            if (type == null) {
              throw new MissingRequiredPropertyException("SynapseSparkJobDefinitionActivityResponse", "type");
            }
            this.type = type;
            return this;
        }
        @CustomType.Setter
        public Builder userProperties(@Nullable List<UserPropertyResponse> userProperties) {

            this.userProperties = userProperties;
            return this;
        }
        public Builder userProperties(UserPropertyResponse... userProperties) {
            return userProperties(List.of(userProperties));
        }
        public SynapseSparkJobDefinitionActivityResponse build() {
            final var _resultValue = new SynapseSparkJobDefinitionActivityResponse();
            _resultValue.arguments = arguments;
            _resultValue.className = className;
            _resultValue.conf = conf;
            _resultValue.configurationType = configurationType;
            _resultValue.dependsOn = dependsOn;
            _resultValue.description = description;
            _resultValue.driverSize = driverSize;
            _resultValue.executorSize = executorSize;
            _resultValue.file = file;
            _resultValue.files = files;
            _resultValue.filesV2 = filesV2;
            _resultValue.linkedServiceName = linkedServiceName;
            _resultValue.name = name;
            _resultValue.numExecutors = numExecutors;
            _resultValue.onInactiveMarkAs = onInactiveMarkAs;
            _resultValue.policy = policy;
            _resultValue.pythonCodeReference = pythonCodeReference;
            _resultValue.scanFolder = scanFolder;
            _resultValue.sparkConfig = sparkConfig;
            _resultValue.sparkJob = sparkJob;
            _resultValue.state = state;
            _resultValue.targetBigDataPool = targetBigDataPool;
            _resultValue.targetSparkConfiguration = targetSparkConfiguration;
            _resultValue.type = type;
            _resultValue.userProperties = userProperties;
            return _resultValue;
        }
    }
}