com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobHadoopJob.kt
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.dataproc.kotlin.outputs
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
/**
 *
 * @property archiveUris HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
 * @property args The arguments to pass to the driver. Do not include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
 * @property fileUris HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
 * @property jarFileUris Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
 * @property loggingConfig The runtime log config for job execution.
 * @property mainClass The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in `jar_file_uris`.
 * @property mainJarFileUri The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
 * @property properties A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
 */
public data class WorkflowTemplateJobHadoopJob(
    public val archiveUris: List<String>? = null,
    public val args: List<String>? = null,
    public val fileUris: List<String>? = null,
    public val jarFileUris: List<String>? = null,
    public val loggingConfig: WorkflowTemplateJobHadoopJobLoggingConfig? = null,
    public val mainClass: String? = null,
    public val mainJarFileUri: String? = null,
    public val properties: Map<String, String>? = null,
) {
    public companion object {
        /**
         * Converts the Java SDK output type into this Kotlin data class.
         */
        public fun toKotlin(javaType: com.pulumi.gcp.dataproc.outputs.WorkflowTemplateJobHadoopJob): WorkflowTemplateJobHadoopJob = WorkflowTemplateJobHadoopJob(
            archiveUris = javaType.archiveUris().map({ args0 -> args0 }),
            args = javaType.args().map({ args0 -> args0 }),
            fileUris = javaType.fileUris().map({ args0 -> args0 }),
            jarFileUris = javaType.jarFileUris().map({ args0 -> args0 }),
            // loggingConfig is an Optional on the Java side; unwrap it and convert the nested type.
            loggingConfig = javaType.loggingConfig().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobHadoopJobLoggingConfig.Companion.toKotlin(args0)
                })
            }).orElse(null),
            mainClass = javaType.mainClass().map({ args0 -> args0 }).orElse(null),
            mainJarFileUri = javaType.mainJarFileUri().map({ args0 -> args0 }).orElse(null),
            // Copy the Java map entries into a Kotlin Map<String, String>.
            properties = javaType.properties().map({ args0 -> args0.key.to(args0.value) }).toMap(),
        )
    }
}
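
// Illustrative sketch (not part of the generated file): constructing this output type by hand,
// e.g. when stubbing values in a unit test. The main jar URI is taken from the KDoc example above;
// the remaining URIs and the Hadoop property are hypothetical placeholders.
private fun exampleWorkflowTemplateJobHadoopJob(): WorkflowTemplateJobHadoopJob =
    WorkflowTemplateJobHadoopJob(
        mainJarFileUri = "gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar",
        args = listOf("gs://foo-bucket/input/", "gs://foo-bucket/output/"),
        jarFileUris = listOf("gs://foo-bucket/libs/extra-formats.jar"),
        properties = mapOf("mapreduce.job.reduces" to "2"),
    )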