
com.pulumi.gcp.dataproc.kotlin.outputs.JobPysparkConfig.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.outputs

import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map

/**
 *
 * @property archiveUris HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
 * @property args The arguments to pass to the driver.
 * @property fileUris HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
 * @property jarFileUris HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
 * @property loggingConfig The runtime logging config of the job.
 * @property mainPythonFileUri The HCFS URI of the main Python file to use as the driver. Must be a .py file.
 * @property properties A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
 * * `logging_config.driver_log_levels` - (Required) The per-package log levels for the driver. This may include the 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 * @property pythonFileUris HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
 */
public data class JobPysparkConfig(
    public val archiveUris: List<String>? = null,
    public val args: List<String>? = null,
    public val fileUris: List<String>? = null,
    public val jarFileUris: List<String>? = null,
    public val loggingConfig: JobPysparkConfigLoggingConfig? = null,
    public val mainPythonFileUri: String,
    public val properties: Map<String, String>? = null,
    public val pythonFileUris: List<String>? = null,
) {
    public companion object {
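        // Converts the generated Java SDK output type into this Kotlin data class.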
        public fun toKotlin(javaType: com.pulumi.gcp.dataproc.outputs.JobPysparkConfig): JobPysparkConfig = JobPysparkConfig(
            archiveUris = javaType.archiveUris().map({ args0 -> args0 }),
            args = javaType.args().map({ args0 -> args0 }),
            fileUris = javaType.fileUris().map({ args0 -> args0 }),
            jarFileUris = javaType.jarFileUris().map({ args0 -> args0 }),
            loggingConfig = javaType.loggingConfig().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.JobPysparkConfigLoggingConfig.Companion.toKotlin(args0)
                })
            }).orElse(null),
            mainPythonFileUri = javaType.mainPythonFileUri(),
            properties = javaType.properties().map({ args0 -> args0.key.to(args0.value) }).toMap(),
            pythonFileUris = javaType.pythonFileUris().map({ args0 -> args0 }),
        )
    }
}
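
For orientation, here is a minimal sketch of constructing this output type by hand. The bucket paths, arguments, and Spark properties are hypothetical placeholders; in normal use a JobPysparkConfig value is produced by the provider (via toKotlin above) rather than built directly.

import com.pulumi.gcp.dataproc.kotlin.outputs.JobPysparkConfig

fun main() {
    // Hypothetical values, used only to illustrate the field shapes documented above.
    val config = JobPysparkConfig(
        mainPythonFileUri = "gs://example-bucket/jobs/word_count.py",
        args = listOf("--input", "gs://example-bucket/data/", "--output", "gs://example-bucket/out/"),
        pythonFileUris = listOf("gs://example-bucket/libs/helpers.py"),
        jarFileUris = listOf("gs://example-bucket/libs/extra-connector.jar"),
        properties = mapOf(
            "spark.executor.memory" to "4g",
            "spark.executor.instances" to "2",
        ),
    )

    // Optional fields that were not set remain null.
    println("Driver: ${config.mainPythonFileUri}, args: ${config.args}")
    println("Logging config present: ${config.loggingConfig != null}")
}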
