com.pulumi.azurenative.datafactory.kotlin.outputs.SynapseSparkJobDefinitionActivityResponse.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azurenative.datafactory.kotlin.outputs

import kotlin.Any
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map

/**
 * Execute spark job activity.
 * @property arguments User specified arguments to SynapseSparkJobDefinitionActivity.
 * @property className The fully-qualified identifier of the main class in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string).
 * @property conf Spark configuration properties, which will override the 'conf' of the spark job definition you provide.
 * @property configurationType The type of the spark config.
 * @property dependsOn Activity depends on condition.
 * @property description Activity description.
 * @property driverSize Number of cores and amount of memory to be used for the driver allocated in the specified Spark pool for the job, which will override 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
 * @property executorSize Number of cores and amount of memory to be used for executors allocated in the specified Spark pool for the job, which will override 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string).
 * @property file The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string).
 * @property files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.
 * @property filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide.
 * @property linkedServiceName Linked service reference.
 * @property name Activity name.
 * @property numExecutors Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).
 * @property onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property; if not provided when the activity is inactive, the status will be Succeeded by default.
 * @property policy Activity policy.
 * @property pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide.
 * @property scanFolder Whether to scan subfolders of the root folder of the main definition file; matching files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folder names are case sensitive. Type: boolean (or Expression with resultType boolean).
 * @property sparkConfig Spark configuration property.
 * @property sparkJob Synapse spark job reference.
 * @property state Activity state. This is an optional property; if not provided, the state will be Active by default.
 * @property targetBigDataPool The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide.
 * @property targetSparkConfiguration The spark configuration of the spark job.
 * @property type Type of activity.
 * Expected value is 'SparkJob'.
 * @property userProperties Activity user properties.
 */
public data class SynapseSparkJobDefinitionActivityResponse(
    public val arguments: List<Any>? = null,
    public val className: Any? = null,
    public val conf: Any? = null,
    public val configurationType: String? = null,
    public val dependsOn: List<ActivityDependencyResponse>? = null,
    public val description: String? = null,
    public val driverSize: Any? = null,
    public val executorSize: Any? = null,
    public val `file`: Any? = null,
    public val files: List<Any>? = null,
    public val filesV2: List<Any>? = null,
    public val linkedServiceName: LinkedServiceReferenceResponse? = null,
    public val name: String,
    public val numExecutors: Any? = null,
    public val onInactiveMarkAs: String? = null,
    public val policy: ActivityPolicyResponse? = null,
    public val pythonCodeReference: List<Any>? = null,
    public val scanFolder: Any? = null,
    public val sparkConfig: Map<String, Any>? = null,
    public val sparkJob: SynapseSparkJobReferenceResponse,
    public val state: String? = null,
    public val targetBigDataPool: BigDataPoolParametrizationReferenceResponse? = null,
    public val targetSparkConfiguration: SparkConfigurationParametrizationReferenceResponse? = null,
    public val type: String,
    public val userProperties: List<UserPropertyResponse>? = null,
) {
    public companion object {
        public fun toKotlin(javaType: com.pulumi.azurenative.datafactory.outputs.SynapseSparkJobDefinitionActivityResponse): SynapseSparkJobDefinitionActivityResponse = SynapseSparkJobDefinitionActivityResponse(
            arguments = javaType.arguments().map({ args0 -> args0 }),
            className = javaType.className().map({ args0 -> args0 }).orElse(null),
            conf = javaType.conf().map({ args0 -> args0 }).orElse(null),
            configurationType = javaType.configurationType().map({ args0 -> args0 }).orElse(null),
            dependsOn = javaType.dependsOn().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.ActivityDependencyResponse.Companion.toKotlin(args0)
                })
            }),
            description = javaType.description().map({ args0 -> args0 }).orElse(null),
            driverSize = javaType.driverSize().map({ args0 -> args0 }).orElse(null),
            executorSize = javaType.executorSize().map({ args0 -> args0 }).orElse(null),
            `file` = javaType.`file`().map({ args0 -> args0 }).orElse(null),
            files = javaType.files().map({ args0 -> args0 }),
            filesV2 = javaType.filesV2().map({ args0 -> args0 }),
            linkedServiceName = javaType.linkedServiceName().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.LinkedServiceReferenceResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            name = javaType.name(),
            numExecutors = javaType.numExecutors().map({ args0 -> args0 }).orElse(null),
            onInactiveMarkAs = javaType.onInactiveMarkAs().map({ args0 -> args0 }).orElse(null),
            policy = javaType.policy().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.ActivityPolicyResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            pythonCodeReference = javaType.pythonCodeReference().map({ args0 -> args0 }),
            scanFolder = javaType.scanFolder().map({ args0 -> args0 }).orElse(null),
            sparkConfig = javaType.sparkConfig().map({ args0 -> args0.key.to(args0.value) }).toMap(),
            sparkJob = javaType.sparkJob().let({ args0 ->
                com.pulumi.azurenative.datafactory.kotlin.outputs.SynapseSparkJobReferenceResponse.Companion.toKotlin(args0)
            }),
            state = javaType.state().map({ args0 -> args0 }).orElse(null),
            targetBigDataPool = javaType.targetBigDataPool().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.BigDataPoolParametrizationReferenceResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            targetSparkConfiguration = javaType.targetSparkConfiguration().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.SparkConfigurationParametrizationReferenceResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            type = javaType.type(),
            userProperties = javaType.userProperties().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.UserPropertyResponse.Companion.toKotlin(args0)
                })
            }),
        )
    }
}
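
// Illustrative usage sketch (not part of the generated SDK file): once a
// response has been materialized, for example via the companion `toKotlin`
// converter above, its fields read like any Kotlin data class. The helper
// name `describeActivity` is hypothetical.
private fun describeActivity(activity: SynapseSparkJobDefinitionActivityResponse): String =
    buildString {
        // `name` and `type` are the only non-nullable scalar fields.
        append("Activity '").append(activity.name).append("' of type ").append(activity.type)
        // Optional fields are nullable; render them only when present.
        activity.numExecutors?.let { append(", numExecutors=").append(it) }
        activity.state?.let { append(", state=").append(it) }
    }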