/*
 * Source recovered from a Maven repository web listing for
 * com.pulumi.azurenative.datafactory.inputs.SynapseSparkJobDefinitionActivityArgs.
 * Note: a newer artifact version (2.72.0) is available upstream.
 */
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azurenative.datafactory.inputs;

import com.pulumi.azurenative.datafactory.enums.ActivityOnInactiveMarkAs;
import com.pulumi.azurenative.datafactory.enums.ActivityState;
import com.pulumi.azurenative.datafactory.enums.ConfigurationType;
import com.pulumi.azurenative.datafactory.inputs.ActivityDependencyArgs;
import com.pulumi.azurenative.datafactory.inputs.ActivityPolicyArgs;
import com.pulumi.azurenative.datafactory.inputs.BigDataPoolParametrizationReferenceArgs;
import com.pulumi.azurenative.datafactory.inputs.LinkedServiceReferenceArgs;
import com.pulumi.azurenative.datafactory.inputs.SparkConfigurationParametrizationReferenceArgs;
import com.pulumi.azurenative.datafactory.inputs.SynapseSparkJobReferenceArgs;
import com.pulumi.azurenative.datafactory.inputs.UserPropertyArgs;
import com.pulumi.core.Either;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.core.internal.Codegen;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Object;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;


/**
 * Execute spark job activity.
 * 
 */
public final class SynapseSparkJobDefinitionActivityArgs extends com.pulumi.resources.ResourceArgs {

    public static final SynapseSparkJobDefinitionActivityArgs Empty = new SynapseSparkJobDefinitionActivityArgs();

    /**
     * User specified arguments to SynapseSparkJobDefinitionActivity.
     * 
     */
    @Import(name="arguments")
    private @Nullable Output> arguments;

    /**
     * @return User specified arguments to SynapseSparkJobDefinitionActivity.
     * 
     */
    public Optional>> arguments() {
        return Optional.ofNullable(this.arguments);
    }

    /**
     * The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    @Import(name="className")
    private @Nullable Output className;

    /**
     * @return The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional> className() {
        return Optional.ofNullable(this.className);
    }

    /**
     * Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
     * 
     */
    @Import(name="conf")
    private @Nullable Output conf;

    /**
     * @return Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
     * 
     */
    public Optional> conf() {
        return Optional.ofNullable(this.conf);
    }

    /**
     * The type of the spark config.
     * 
     */
    @Import(name="configurationType")
    private @Nullable Output> configurationType;

    /**
     * @return The type of the spark config.
     * 
     */
    public Optional>> configurationType() {
        return Optional.ofNullable(this.configurationType);
    }

    /**
     * Activity depends on condition.
     * 
     */
    @Import(name="dependsOn")
    private @Nullable Output> dependsOn;

    /**
     * @return Activity depends on condition.
     * 
     */
    public Optional>> dependsOn() {
        return Optional.ofNullable(this.dependsOn);
    }

    /**
     * Activity description.
     * 
     */
    @Import(name="description")
    private @Nullable Output description;

    /**
     * @return Activity description.
     * 
     */
    public Optional> description() {
        return Optional.ofNullable(this.description);
    }

    /**
     * Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    @Import(name="driverSize")
    private @Nullable Output driverSize;

    /**
     * @return Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional> driverSize() {
        return Optional.ofNullable(this.driverSize);
    }

    /**
     * Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    @Import(name="executorSize")
    private @Nullable Output executorSize;

    /**
     * @return Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional> executorSize() {
        return Optional.ofNullable(this.executorSize);
    }

    /**
     * The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    @Import(name="file")
    private @Nullable Output file;

    /**
     * @return The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
     * 
     */
    public Optional> file() {
        return Optional.ofNullable(this.file);
    }

    /**
     * (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
     * 
     */
    @Import(name="files")
    private @Nullable Output> files;

    /**
     * @return (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
     * 
     */
    public Optional>> files() {
        return Optional.ofNullable(this.files);
    }

    /**
     * Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
     * 
     */
    @Import(name="filesV2")
    private @Nullable Output> filesV2;

    /**
     * @return Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
     * 
     */
    public Optional>> filesV2() {
        return Optional.ofNullable(this.filesV2);
    }

    /**
     * Linked service reference.
     * 
     */
    @Import(name="linkedServiceName")
    private @Nullable Output linkedServiceName;

    /**
     * @return Linked service reference.
     * 
     */
    public Optional> linkedServiceName() {
        return Optional.ofNullable(this.linkedServiceName);
    }

    /**
     * Activity name.
     * 
     */
    @Import(name="name", required=true)
    private Output name;

    /**
     * @return Activity name.
     * 
     */
    public Output name() {
        return this.name;
    }

    /**
     * Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
     * 
     */
    @Import(name="numExecutors")
    private @Nullable Output numExecutors;

    /**
     * @return Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
     * 
     */
    public Optional> numExecutors() {
        return Optional.ofNullable(this.numExecutors);
    }

    /**
     * Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
     * 
     */
    @Import(name="onInactiveMarkAs")
    private @Nullable Output> onInactiveMarkAs;

    /**
     * @return Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
     * 
     */
    public Optional>> onInactiveMarkAs() {
        return Optional.ofNullable(this.onInactiveMarkAs);
    }

    /**
     * Activity policy.
     * 
     */
    @Import(name="policy")
    private @Nullable Output policy;

    /**
     * @return Activity policy.
     * 
     */
    public Optional> policy() {
        return Optional.ofNullable(this.policy);
    }

    /**
     * Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
     * 
     */
    @Import(name="pythonCodeReference")
    private @Nullable Output> pythonCodeReference;

    /**
     * @return Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
     * 
     */
    public Optional>> pythonCodeReference() {
        return Optional.ofNullable(this.pythonCodeReference);
    }

    /**
     * Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
     * 
     */
    @Import(name="scanFolder")
    private @Nullable Output scanFolder;

    /**
     * @return Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
     * 
     */
    public Optional> scanFolder() {
        return Optional.ofNullable(this.scanFolder);
    }

    /**
     * Spark configuration property.
     * 
     */
    @Import(name="sparkConfig")
    private @Nullable Output> sparkConfig;

    /**
     * @return Spark configuration property.
     * 
     */
    public Optional>> sparkConfig() {
        return Optional.ofNullable(this.sparkConfig);
    }

    /**
     * Synapse spark job reference.
     * 
     */
    @Import(name="sparkJob", required=true)
    private Output sparkJob;

    /**
     * @return Synapse spark job reference.
     * 
     */
    public Output sparkJob() {
        return this.sparkJob;
    }

    /**
     * Activity state. This is an optional property and if not provided, the state will be Active by default.
     * 
     */
    @Import(name="state")
    private @Nullable Output> state;

    /**
     * @return Activity state. This is an optional property and if not provided, the state will be Active by default.
     * 
     */
    public Optional>> state() {
        return Optional.ofNullable(this.state);
    }

    /**
     * The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
     * 
     */
    @Import(name="targetBigDataPool")
    private @Nullable Output targetBigDataPool;

    /**
     * @return The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
     * 
     */
    public Optional> targetBigDataPool() {
        return Optional.ofNullable(this.targetBigDataPool);
    }

    /**
     * The spark configuration of the spark job.
     * 
     */
    @Import(name="targetSparkConfiguration")
    private @Nullable Output targetSparkConfiguration;

    /**
     * @return The spark configuration of the spark job.
     * 
     */
    public Optional> targetSparkConfiguration() {
        return Optional.ofNullable(this.targetSparkConfiguration);
    }

    /**
     * Type of activity.
     * Expected value is 'SparkJob'.
     * 
     */
    @Import(name="type", required=true)
    private Output type;

    /**
     * @return Type of activity.
     * Expected value is 'SparkJob'.
     * 
     */
    public Output type() {
        return this.type;
    }

    /**
     * Activity user properties.
     * 
     */
    @Import(name="userProperties")
    private @Nullable Output> userProperties;

    /**
     * @return Activity user properties.
     * 
     */
    public Optional>> userProperties() {
        return Optional.ofNullable(this.userProperties);
    }

    // No-arg constructor: used for the shared Empty instance and as the
    // Builder's blank starting point. All properties remain unset (null).
    private SynapseSparkJobDefinitionActivityArgs() {}

    // Copy constructor: snapshots every property from {@code $} so a Builder
    // seeded with defaults never aliases (and cannot mutate) the source instance.
    private SynapseSparkJobDefinitionActivityArgs(SynapseSparkJobDefinitionActivityArgs $) {
        this.arguments = $.arguments;
        this.className = $.className;
        this.conf = $.conf;
        this.configurationType = $.configurationType;
        this.dependsOn = $.dependsOn;
        this.description = $.description;
        this.driverSize = $.driverSize;
        this.executorSize = $.executorSize;
        this.file = $.file;
        this.files = $.files;
        this.filesV2 = $.filesV2;
        this.linkedServiceName = $.linkedServiceName;
        this.name = $.name;
        this.numExecutors = $.numExecutors;
        this.onInactiveMarkAs = $.onInactiveMarkAs;
        this.policy = $.policy;
        this.pythonCodeReference = $.pythonCodeReference;
        this.scanFolder = $.scanFolder;
        this.sparkConfig = $.sparkConfig;
        this.sparkJob = $.sparkJob;
        this.state = $.state;
        this.targetBigDataPool = $.targetBigDataPool;
        this.targetSparkConfiguration = $.targetSparkConfiguration;
        this.type = $.type;
        this.userProperties = $.userProperties;
    }

    /**
     * @return a new, empty {@link Builder}
     */
    public static Builder builder() {
        return new Builder();
    }
    /**
     * @param defaults instance whose property values seed the new builder
     * @return a new {@link Builder} pre-populated from {@code defaults}
     */
    public static Builder builder(SynapseSparkJobDefinitionActivityArgs defaults) {
        return new Builder(defaults);
    }

    public static final class Builder {
        private SynapseSparkJobDefinitionActivityArgs $;

        public Builder() {
            $ = new SynapseSparkJobDefinitionActivityArgs();
        }

        public Builder(SynapseSparkJobDefinitionActivityArgs defaults) {
            $ = new SynapseSparkJobDefinitionActivityArgs(Objects.requireNonNull(defaults));
        }

        /**
         * @param arguments User specified arguments to SynapseSparkJobDefinitionActivity.
         * 
         * @return builder
         * 
         */
        public Builder arguments(@Nullable Output> arguments) {
            $.arguments = arguments;
            return this;
        }

        /**
         * @param arguments User specified arguments to SynapseSparkJobDefinitionActivity.
         * 
         * @return builder
         * 
         */
        public Builder arguments(List arguments) {
            return arguments(Output.of(arguments));
        }

        /**
         * @param arguments User specified arguments to SynapseSparkJobDefinitionActivity.
         * 
         * @return builder
         * 
         */
        public Builder arguments(Object... arguments) {
            return arguments(List.of(arguments));
        }

        /**
         * @param className The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder className(@Nullable Output className) {
            $.className = className;
            return this;
        }

        /**
         * @param className The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder className(Object className) {
            return className(Output.of(className));
        }

        /**
         * @param conf Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder conf(@Nullable Output conf) {
            $.conf = conf;
            return this;
        }

        /**
         * @param conf Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder conf(Object conf) {
            return conf(Output.of(conf));
        }

        /**
         * @param configurationType The type of the spark config.
         * 
         * @return builder
         * 
         */
        public Builder configurationType(@Nullable Output> configurationType) {
            $.configurationType = configurationType;
            return this;
        }

        /**
         * @param configurationType The type of the spark config.
         * 
         * @return builder
         * 
         */
        public Builder configurationType(Either configurationType) {
            return configurationType(Output.of(configurationType));
        }

        /**
         * @param configurationType The type of the spark config.
         * 
         * @return builder
         * 
         */
        public Builder configurationType(String configurationType) {
            return configurationType(Either.ofLeft(configurationType));
        }

        /**
         * @param configurationType The type of the spark config.
         * 
         * @return builder
         * 
         */
        public Builder configurationType(ConfigurationType configurationType) {
            return configurationType(Either.ofRight(configurationType));
        }

        /**
         * @param dependsOn Activity depends on condition.
         * 
         * @return builder
         * 
         */
        public Builder dependsOn(@Nullable Output> dependsOn) {
            $.dependsOn = dependsOn;
            return this;
        }

        /**
         * @param dependsOn Activity depends on condition.
         * 
         * @return builder
         * 
         */
        public Builder dependsOn(List dependsOn) {
            return dependsOn(Output.of(dependsOn));
        }

        /**
         * @param dependsOn Activity depends on condition.
         * 
         * @return builder
         * 
         */
        public Builder dependsOn(ActivityDependencyArgs... dependsOn) {
            return dependsOn(List.of(dependsOn));
        }

        /**
         * @param description Activity description.
         * 
         * @return builder
         * 
         */
        public Builder description(@Nullable Output description) {
            $.description = description;
            return this;
        }

        /**
         * @param description Activity description.
         * 
         * @return builder
         * 
         */
        public Builder description(String description) {
            return description(Output.of(description));
        }

        /**
         * @param driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder driverSize(@Nullable Output driverSize) {
            $.driverSize = driverSize;
            return this;
        }

        /**
         * @param driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder driverSize(Object driverSize) {
            return driverSize(Output.of(driverSize));
        }

        /**
         * @param executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder executorSize(@Nullable Output executorSize) {
            $.executorSize = executorSize;
            return this;
        }

        /**
         * @param executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder executorSize(Object executorSize) {
            return executorSize(Output.of(executorSize));
        }

        /**
         * @param file The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder file(@Nullable Output file) {
            $.file = file;
            return this;
        }

        /**
         * @param file The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
         * 
         * @return builder
         * 
         */
        public Builder file(Object file) {
            return file(Output.of(file));
        }

        /**
         * @param files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder files(@Nullable Output> files) {
            $.files = files;
            return this;
        }

        /**
         * @param files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder files(List files) {
            return files(Output.of(files));
        }

        /**
         * @param files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder files(Object... files) {
            return files(List.of(files));
        }

        /**
         * @param filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder filesV2(@Nullable Output> filesV2) {
            $.filesV2 = filesV2;
            return this;
        }

        /**
         * @param filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder filesV2(List filesV2) {
            return filesV2(Output.of(filesV2));
        }

        /**
         * @param filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder filesV2(Object... filesV2) {
            return filesV2(List.of(filesV2));
        }

        /**
         * @param linkedServiceName Linked service reference.
         * 
         * @return builder
         * 
         */
        public Builder linkedServiceName(@Nullable Output linkedServiceName) {
            $.linkedServiceName = linkedServiceName;
            return this;
        }

        /**
         * @param linkedServiceName Linked service reference.
         * 
         * @return builder
         * 
         */
        public Builder linkedServiceName(LinkedServiceReferenceArgs linkedServiceName) {
            return linkedServiceName(Output.of(linkedServiceName));
        }

        /**
         * @param name Activity name.
         * 
         * @return builder
         * 
         */
        public Builder name(Output name) {
            $.name = name;
            return this;
        }

        /**
         * @param name Activity name.
         * 
         * @return builder
         * 
         */
        public Builder name(String name) {
            return name(Output.of(name));
        }

        /**
         * @param numExecutors Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
         * 
         * @return builder
         * 
         */
        public Builder numExecutors(@Nullable Output numExecutors) {
            $.numExecutors = numExecutors;
            return this;
        }

        /**
         * @param numExecutors Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
         * 
         * @return builder
         * 
         */
        public Builder numExecutors(Object numExecutors) {
            return numExecutors(Output.of(numExecutors));
        }

        /**
         * @param onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
         * 
         * @return builder
         * 
         */
        public Builder onInactiveMarkAs(@Nullable Output> onInactiveMarkAs) {
            $.onInactiveMarkAs = onInactiveMarkAs;
            return this;
        }

        /**
         * @param onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
         * 
         * @return builder
         * 
         */
        public Builder onInactiveMarkAs(Either onInactiveMarkAs) {
            return onInactiveMarkAs(Output.of(onInactiveMarkAs));
        }

        /**
         * @param onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
         * 
         * @return builder
         * 
         */
        public Builder onInactiveMarkAs(String onInactiveMarkAs) {
            return onInactiveMarkAs(Either.ofLeft(onInactiveMarkAs));
        }

        /**
         * @param onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
         * 
         * @return builder
         * 
         */
        public Builder onInactiveMarkAs(ActivityOnInactiveMarkAs onInactiveMarkAs) {
            return onInactiveMarkAs(Either.ofRight(onInactiveMarkAs));
        }

        /**
         * @param policy Activity policy.
         * 
         * @return builder
         * 
         */
        public Builder policy(@Nullable Output policy) {
            $.policy = policy;
            return this;
        }

        /**
         * @param policy Activity policy.
         * 
         * @return builder
         * 
         */
        public Builder policy(ActivityPolicyArgs policy) {
            return policy(Output.of(policy));
        }

        /**
         * @param pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder pythonCodeReference(@Nullable Output> pythonCodeReference) {
            $.pythonCodeReference = pythonCodeReference;
            return this;
        }

        /**
         * @param pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder pythonCodeReference(List pythonCodeReference) {
            return pythonCodeReference(Output.of(pythonCodeReference));
        }

        /**
         * @param pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder pythonCodeReference(Object... pythonCodeReference) {
            return pythonCodeReference(List.of(pythonCodeReference));
        }

        /**
         * @param scanFolder Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
         * 
         * @return builder
         * 
         */
        public Builder scanFolder(@Nullable Output scanFolder) {
            $.scanFolder = scanFolder;
            return this;
        }

        /**
         * @param scanFolder Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean).
         * 
         * @return builder
         * 
         */
        public Builder scanFolder(Object scanFolder) {
            return scanFolder(Output.of(scanFolder));
        }

        /**
         * @param sparkConfig Spark configuration property.
         * 
         * @return builder
         * 
         */
        public Builder sparkConfig(@Nullable Output> sparkConfig) {
            $.sparkConfig = sparkConfig;
            return this;
        }

        /**
         * @param sparkConfig Spark configuration property.
         * 
         * @return builder
         * 
         */
        public Builder sparkConfig(Map<String,Object> sparkConfig) {
            return sparkConfig(Output.of(sparkConfig));
        }

        /**
         * @param sparkJob Synapse spark job reference.
         * 
         * @return builder
         * 
         */
        public Builder sparkJob(Output<SynapseSparkJobReferenceArgs> sparkJob) {
            $.sparkJob = sparkJob;
            return this;
        }

        /**
         * @param sparkJob Synapse spark job reference.
         * 
         * @return builder
         * 
         */
        public Builder sparkJob(SynapseSparkJobReferenceArgs sparkJob) {
            // Plain-value convenience: lift the reference into an Output and store it directly.
            $.sparkJob = Output.of(sparkJob);
            return this;
        }

        /**
         * @param state Activity state. This is an optional property and if not provided, the state will be Active by default.
         * 
         * @return builder
         * 
         */
        public Builder state(@Nullable Output> state) {
            $.state = state;
            return this;
        }

        /**
         * @param state Activity state. This is an optional property and if not provided, the state will be Active by default.
         * 
         * @return builder
         * 
         */
        public Builder state(Either<String,ActivityState> state) {
            return state(Output.of(state));
        }

        /**
         * @param state Activity state. This is an optional property and if not provided, the state will be Active by default.
         * 
         * @return builder
         * 
         */
        public Builder state(String state) {
            // Raw-string convenience: an explicit type witness keeps the Either's right side bound to ActivityState.
            return state(Either.<String,ActivityState>ofLeft(state));
        }

        /**
         * @param state Activity state. This is an optional property and if not provided, the state will be Active by default.
         * 
         * @return builder
         * 
         */
        public Builder state(ActivityState state) {
            // Enum convenience: an explicit type witness keeps the Either's left side bound to String.
            return state(Either.<String,ActivityState>ofRight(state));
        }

        /**
         * @param targetBigDataPool The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder targetBigDataPool(@Nullable Output<BigDataPoolParametrizationReferenceArgs> targetBigDataPool) {
            $.targetBigDataPool = targetBigDataPool;
            return this;
        }

        /**
         * @param targetBigDataPool The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
         * 
         * @return builder
         * 
         */
        public Builder targetBigDataPool(BigDataPoolParametrizationReferenceArgs targetBigDataPool) {
            // Plain-value convenience: lift the reference into an Output and store it directly.
            $.targetBigDataPool = Output.of(targetBigDataPool);
            return this;
        }

        /**
         * @param targetSparkConfiguration The spark configuration of the spark job.
         * 
         * @return builder
         * 
         */
        public Builder targetSparkConfiguration(@Nullable Output<SparkConfigurationParametrizationReferenceArgs> targetSparkConfiguration) {
            $.targetSparkConfiguration = targetSparkConfiguration;
            return this;
        }

        /**
         * @param targetSparkConfiguration The spark configuration of the spark job.
         * 
         * @return builder
         * 
         */
        public Builder targetSparkConfiguration(SparkConfigurationParametrizationReferenceArgs targetSparkConfiguration) {
            // Plain-value convenience: lift the reference into an Output and store it directly.
            $.targetSparkConfiguration = Output.of(targetSparkConfiguration);
            return this;
        }

        /**
         * @param type Type of activity.
         * Expected value is 'SparkJob'.
         * 
         * @return builder
         * 
         */
        public Builder type(Output<String> type) {
            $.type = type;
            return this;
        }

        /**
         * @param type Type of activity.
         * Expected value is 'SparkJob'.
         * 
         * @return builder
         * 
         */
        public Builder type(String type) {
            // Plain-value convenience: lift the discriminator string into an Output and store it directly.
            $.type = Output.of(type);
            return this;
        }

        /**
         * @param userProperties Activity user properties.
         * 
         * @return builder
         * 
         */
        public Builder userProperties(@Nullable Output> userProperties) {
            $.userProperties = userProperties;
            return this;
        }

        /**
         * @param userProperties Activity user properties.
         * 
         * @return builder
         * 
         */
        public Builder userProperties(List<UserPropertyArgs> userProperties) {
            return userProperties(Output.of(userProperties));
        }

        /**
         * @param userProperties Activity user properties.
         * 
         * @return builder
         * 
         */
        public Builder userProperties(UserPropertyArgs... userProperties) {
            // Varargs convenience: wrap the arguments in an immutable list and store as a plain Output.
            $.userProperties = Output.of(List.of(userProperties));
            return this;
        }

        /**
         * Validates required properties and returns the finished args object.
         *
         * @return the fully-populated {@link SynapseSparkJobDefinitionActivityArgs}
         * @throws MissingRequiredPropertyException if {@code name} or {@code sparkJob} was never set
         */
        public SynapseSparkJobDefinitionActivityArgs build() {
            requireProperty($.name, "name");
            requireProperty($.sparkJob, "sparkJob");
            // Normalize the discriminator through the codegen helper; require() rejects an absent value.
            $.type = Codegen.stringProp("type").output().arg($.type).require();
            return $;
        }

        /** Throws when a required builder property was left unset. */
        private static void requireProperty(Object value, String propertyName) {
            if (value == null) {
                throw new MissingRequiredPropertyException("SynapseSparkJobDefinitionActivityArgs", propertyName);
            }
        }
    }

}