
com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkJob.kt Maven / Gradle / Ivy

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.outputs

import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map

/**
 *
 * @property archiveUris HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
 * @property args The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
 * @property fileUris HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
 * @property jarFileUris HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
 * @property loggingConfig The runtime log config for job execution.
 * @property mainClass The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in `jar_file_uris`.
 * @property mainJarFileUri The HCFS URI of the jar file that contains the main class.
 * @property properties A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
 */
public data class WorkflowTemplateJobSparkJob(
    public val archiveUris: List<String>? = null,
    public val args: List<String>? = null,
    public val fileUris: List<String>? = null,
    public val jarFileUris: List<String>? = null,
    public val loggingConfig: WorkflowTemplateJobSparkJobLoggingConfig? = null,
    public val mainClass: String? = null,
    public val mainJarFileUri: String? = null,
    public val properties: Map<String, String>? = null,
) {
    public companion object {
        public fun toKotlin(javaType: com.pulumi.gcp.dataproc.outputs.WorkflowTemplateJobSparkJob): WorkflowTemplateJobSparkJob = WorkflowTemplateJobSparkJob(
            archiveUris = javaType.archiveUris().map({ args0 -> args0 }),
            args = javaType.args().map({ args0 -> args0 }),
            fileUris = javaType.fileUris().map({ args0 -> args0 }),
            jarFileUris = javaType.jarFileUris().map({ args0 -> args0 }),
            loggingConfig = javaType.loggingConfig().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkJobLoggingConfig.Companion.toKotlin(args0)
                })
            }).orElse(null),
            mainClass = javaType.mainClass().map({ args0 -> args0 }).orElse(null),
            mainJarFileUri = javaType.mainJarFileUri().map({ args0 -> args0 }).orElse(null),
            properties = javaType.properties().map({ args0 -> args0.key to args0.value }).toMap(),
        )
    }
}
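
For reference, the output type above can also be constructed directly, which is mostly useful when unit-testing code that consumes it. The following is a minimal sketch; the main class, jar URI, argument, and property values are illustrative and not taken from this file:

import com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkJob

// Illustrative sample only: builds the output type by hand, e.g. as a test fixture.
// Assumes this artifact (com.pulumi:gcp-kotlin) is on the classpath.
fun sampleSparkJob(): WorkflowTemplateJobSparkJob = WorkflowTemplateJobSparkJob(
    mainClass = "org.apache.spark.examples.SparkPi",
    jarFileUris = listOf("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
    args = listOf("1000"),
    properties = mapOf("spark.executor.memory" to "4g"),
)

In normal use the SDK produces instances itself, through the companion toKotlin converter shown above, when it maps the Java output type com.pulumi.gcp.dataproc.outputs.WorkflowTemplateJobSparkJob back into Kotlin.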



