![JAR search and dependency download from the Maven repository](/logo.png)
com.pulumi.azurenative.synapse.kotlin.outputs.GetBigDataPoolResult.kt Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of pulumi-azure-native-kotlin Show documentation
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azurenative.synapse.kotlin.outputs
import kotlin.Boolean
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
/**
* A Big Data pool
* @property autoPause Auto-pausing properties
* @property autoScale Auto-scaling properties
* @property cacheSize The cache size
* @property creationDate The time when the Big Data pool was created.
* @property customLibraries List of custom libraries/packages associated with the spark pool.
* @property defaultSparkLogFolder The default folder where Spark logs will be written.
* @property dynamicExecutorAllocation Dynamic Executor Allocation
* @property id Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
* @property isAutotuneEnabled Whether autotune is required or not.
* @property isComputeIsolationEnabled Whether compute isolation is required or not.
* @property lastSucceededTimestamp The time when the Big Data pool was updated successfully.
* @property libraryRequirements Library version requirements
* @property location The geo-location where the resource lives
* @property name The name of the resource
* @property nodeCount The number of nodes in the Big Data pool.
* @property nodeSize The level of compute power that each node in the Big Data pool has.
* @property nodeSizeFamily The kind of nodes that the Big Data pool provides.
* @property provisioningState The state of the Big Data pool.
* @property sessionLevelPackagesEnabled Whether session level packages enabled.
* @property sparkConfigProperties Spark configuration file to specify additional properties
* @property sparkEventsFolder The Spark events folder
* @property sparkVersion The Apache Spark version.
* @property tags Resource tags.
* @property type The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
*/
public data class GetBigDataPoolResult(
    public val autoPause: AutoPausePropertiesResponse? = null,
    public val autoScale: AutoScalePropertiesResponse? = null,
    public val cacheSize: Int? = null,
    public val creationDate: String,
    // Restored type argument: raw `List?` is invalid Kotlin; toKotlin() below
    // maps each element through LibraryInfoResponse.toKotlin, fixing the element type.
    public val customLibraries: List<LibraryInfoResponse>? = null,
    public val defaultSparkLogFolder: String? = null,
    public val dynamicExecutorAllocation: DynamicExecutorAllocationResponse? = null,
    public val id: String,
    public val isAutotuneEnabled: Boolean? = null,
    public val isComputeIsolationEnabled: Boolean? = null,
    public val lastSucceededTimestamp: String,
    public val libraryRequirements: LibraryRequirementsResponse? = null,
    public val location: String,
    public val name: String,
    public val nodeCount: Int? = null,
    public val nodeSize: String? = null,
    public val nodeSizeFamily: String? = null,
    public val provisioningState: String? = null,
    public val sessionLevelPackagesEnabled: Boolean? = null,
    public val sparkConfigProperties: SparkConfigPropertiesResponse? = null,
    public val sparkEventsFolder: String? = null,
    public val sparkVersion: String? = null,
    // Restored type argument: raw `Map?` is invalid Kotlin; toKotlin() below builds
    // the map from `args0.key to args0.value` entries, i.e. Map<String, String>.
    public val tags: Map<String, String>? = null,
    public val type: String,
) {
    public companion object {
        /**
         * Converts the Java SDK result [com.pulumi.azurenative.synapse.outputs.GetBigDataPoolResult]
         * into this Kotlin representation, unwrapping each `Optional`-valued accessor to a
         * nullable field via `map(...).orElse(null)` and recursively converting nested
         * response types through their own `toKotlin` companions.
         */
        public fun toKotlin(javaType: com.pulumi.azurenative.synapse.outputs.GetBigDataPoolResult): GetBigDataPoolResult = GetBigDataPoolResult(
            autoPause = javaType.autoPause().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.AutoPausePropertiesResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            autoScale = javaType.autoScale().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.AutoScalePropertiesResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            cacheSize = javaType.cacheSize().map({ args0 -> args0 }).orElse(null),
            creationDate = javaType.creationDate(),
            customLibraries = javaType.customLibraries().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.LibraryInfoResponse.Companion.toKotlin(args0)
                })
            }),
            defaultSparkLogFolder = javaType.defaultSparkLogFolder().map({ args0 -> args0 }).orElse(null),
            dynamicExecutorAllocation = javaType.dynamicExecutorAllocation().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.DynamicExecutorAllocationResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            id = javaType.id(),
            isAutotuneEnabled = javaType.isAutotuneEnabled().map({ args0 -> args0 }).orElse(null),
            isComputeIsolationEnabled = javaType.isComputeIsolationEnabled().map({ args0 ->
                args0
            }).orElse(null),
            lastSucceededTimestamp = javaType.lastSucceededTimestamp(),
            libraryRequirements = javaType.libraryRequirements().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.LibraryRequirementsResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            location = javaType.location(),
            name = javaType.name(),
            nodeCount = javaType.nodeCount().map({ args0 -> args0 }).orElse(null),
            nodeSize = javaType.nodeSize().map({ args0 -> args0 }).orElse(null),
            nodeSizeFamily = javaType.nodeSizeFamily().map({ args0 -> args0 }).orElse(null),
            provisioningState = javaType.provisioningState().map({ args0 -> args0 }).orElse(null),
            sessionLevelPackagesEnabled = javaType.sessionLevelPackagesEnabled().map({ args0 ->
                args0
            }).orElse(null),
            sparkConfigProperties = javaType.sparkConfigProperties().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.azurenative.synapse.kotlin.outputs.SparkConfigPropertiesResponse.Companion.toKotlin(args0)
                })
            }).orElse(null),
            sparkEventsFolder = javaType.sparkEventsFolder().map({ args0 -> args0 }).orElse(null),
            sparkVersion = javaType.sparkVersion().map({ args0 -> args0 }).orElse(null),
            tags = javaType.tags().map({ args0 -> args0.key.to(args0.value) }).toMap(),
            type = javaType.type(),
        )
    }
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy