com.pulumi.azurenative.synapse.BigDataPool
A native Pulumi package for creating and managing Azure resources.
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.azurenative.synapse;
import com.pulumi.azurenative.Utilities;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import com.pulumi.azurenative.synapse.outputs.AutoPausePropertiesResponse;
import com.pulumi.azurenative.synapse.outputs.AutoScalePropertiesResponse;
import com.pulumi.azurenative.synapse.outputs.DynamicExecutorAllocationResponse;
import com.pulumi.azurenative.synapse.outputs.LibraryInfoResponse;
import com.pulumi.azurenative.synapse.outputs.LibraryRequirementsResponse;
import com.pulumi.azurenative.synapse.outputs.SparkConfigPropertiesResponse;
import com.pulumi.core.Alias;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Export;
import com.pulumi.core.annotations.ResourceType;
import com.pulumi.core.internal.Codegen;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
/**
* A Big Data pool
* Azure REST API version: 2021-06-01. Prior API version in Azure Native 1.x: 2021-03-01.
*
* Other available API versions: 2021-05-01, 2021-06-01-preview.
*
* ## Example Usage
* ### Create or update a Big Data pool
*
* <pre>
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.synapse.BigDataPool;
* import com.pulumi.azurenative.synapse.BigDataPoolArgs;
* import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
* import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
* import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
* .autoPause(AutoPausePropertiesArgs.builder()
* .delayInMinutes(15)
* .enabled(true)
* .build())
* .autoScale(AutoScalePropertiesArgs.builder()
* .enabled(true)
* .maxNodeCount(50)
* .minNodeCount(3)
* .build())
* .bigDataPoolName("ExamplePool")
* .defaultSparkLogFolder("/logs")
* .isAutotuneEnabled(false)
* .libraryRequirements(LibraryRequirementsArgs.builder()
* .content("")
* .filename("requirements.txt")
* .build())
* .location("West US 2")
* .nodeCount(4)
* .nodeSize("Medium")
* .nodeSizeFamily("MemoryOptimized")
* .resourceGroupName("ExampleResourceGroup")
* .sparkEventsFolder("/events")
* .sparkVersion("3.3")
* .tags(Map.of("key", "value"))
* .workspaceName("ExampleWorkspace")
* .build());
*
* }
* }
*
* }
* </pre>
*
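* Once the pool is created, its outputs can be read from the returned resource. A minimal sketch
* (the exported names and the {@code 0} fallback are illustrative) that would sit inside the
* {@code stack} method above:
*
* <pre>
* {@code
* // Plain outputs can be exported directly.
* ctx.export("poolName", bigDataPool.name());
*
* // Optional-wrapped outputs are unwrapped with applyValue.
* ctx.export("nodeCount", bigDataPool.nodeCount().applyValue(n -> n.orElse(0)));
* }
* </pre>
*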
* ## Import
*
* An existing resource can be imported using its type token, name, and identifier, e.g.
*
* ```sh
* $ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName}
* ```
*
*/
@ResourceType(type="azure-native:synapse:BigDataPool")
public class BigDataPool extends com.pulumi.resources.CustomResource {
/**
* Auto-pausing properties
*
*/
@Export(name="autoPause", refs={AutoPausePropertiesResponse.class}, tree="[0]")
private Output</* @Nullable */ AutoPausePropertiesResponse> autoPause;
/**
* @return Auto-pausing properties
*
*/
public Output<Optional<AutoPausePropertiesResponse>> autoPause() {
return Codegen.optional(this.autoPause);
}
/**
* Auto-scaling properties
*
*/
@Export(name="autoScale", refs={AutoScalePropertiesResponse.class}, tree="[0]")
private Output</* @Nullable */ AutoScalePropertiesResponse> autoScale;
/**
* @return Auto-scaling properties
*
*/
public Output<Optional<AutoScalePropertiesResponse>> autoScale() {
return Codegen.optional(this.autoScale);
}
/**
* The cache size
*
*/
@Export(name="cacheSize", refs={Integer.class}, tree="[0]")
private Output</* @Nullable */ Integer> cacheSize;
/**
* @return The cache size
*
*/
public Output<Optional<Integer>> cacheSize() {
return Codegen.optional(this.cacheSize);
}
/**
* The time when the Big Data pool was created.
*
*/
@Export(name="creationDate", refs={String.class}, tree="[0]")
private Output<String> creationDate;
/**
* @return The time when the Big Data pool was created.
*
*/
public Output<String> creationDate() {
return this.creationDate;
}
/**
* List of custom libraries/packages associated with the spark pool.
*
*/
@Export(name="customLibraries", refs={List.class,LibraryInfoResponse.class}, tree="[0,1]")
private Output</* @Nullable */ List<LibraryInfoResponse>> customLibraries;
/**
* @return List of custom libraries/packages associated with the spark pool.
*
*/
public Output<Optional<List<LibraryInfoResponse>>> customLibraries() {
return Codegen.optional(this.customLibraries);
}
/**
* The default folder where Spark logs will be written.
*
*/
@Export(name="defaultSparkLogFolder", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> defaultSparkLogFolder;
/**
* @return The default folder where Spark logs will be written.
*
*/
public Output<Optional<String>> defaultSparkLogFolder() {
return Codegen.optional(this.defaultSparkLogFolder);
}
/**
* Dynamic Executor Allocation
*
*/
@Export(name="dynamicExecutorAllocation", refs={DynamicExecutorAllocationResponse.class}, tree="[0]")
private Output</* @Nullable */ DynamicExecutorAllocationResponse> dynamicExecutorAllocation;
/**
* @return Dynamic Executor Allocation
*
*/
public Output<Optional<DynamicExecutorAllocationResponse>> dynamicExecutorAllocation() {
return Codegen.optional(this.dynamicExecutorAllocation);
}
/**
* Whether autotune is required or not.
*
*/
@Export(name="isAutotuneEnabled", refs={Boolean.class}, tree="[0]")
private Output</* @Nullable */ Boolean> isAutotuneEnabled;
/**
* @return Whether autotune is required or not.
*
*/
public Output<Optional<Boolean>> isAutotuneEnabled() {
return Codegen.optional(this.isAutotuneEnabled);
}
/**
* Whether compute isolation is required or not.
*
*/
@Export(name="isComputeIsolationEnabled", refs={Boolean.class}, tree="[0]")
private Output</* @Nullable */ Boolean> isComputeIsolationEnabled;
/**
* @return Whether compute isolation is required or not.
*
*/
public Output<Optional<Boolean>> isComputeIsolationEnabled() {
return Codegen.optional(this.isComputeIsolationEnabled);
}
/**
* The time when the Big Data pool was updated successfully.
*
*/
@Export(name="lastSucceededTimestamp", refs={String.class}, tree="[0]")
private Output<String> lastSucceededTimestamp;
/**
* @return The time when the Big Data pool was updated successfully.
*
*/
public Output<String> lastSucceededTimestamp() {
return this.lastSucceededTimestamp;
}
/**
* Library version requirements
*
*/
@Export(name="libraryRequirements", refs={LibraryRequirementsResponse.class}, tree="[0]")
private Output</* @Nullable */ LibraryRequirementsResponse> libraryRequirements;
/**
* @return Library version requirements
*
*/
public Output<Optional<LibraryRequirementsResponse>> libraryRequirements() {
return Codegen.optional(this.libraryRequirements);
}
/**
* The geo-location where the resource lives
*
*/
@Export(name="location", refs={String.class}, tree="[0]")
private Output<String> location;
/**
* @return The geo-location where the resource lives
*
*/
public Output<String> location() {
return this.location;
}
/**
* The name of the resource
*
*/
@Export(name="name", refs={String.class}, tree="[0]")
private Output<String> name;
/**
* @return The name of the resource
*
*/
public Output<String> name() {
return this.name;
}
/**
* The number of nodes in the Big Data pool.
*
*/
@Export(name="nodeCount", refs={Integer.class}, tree="[0]")
private Output</* @Nullable */ Integer> nodeCount;
/**
* @return The number of nodes in the Big Data pool.
*
*/
public Output<Optional<Integer>> nodeCount() {
return Codegen.optional(this.nodeCount);
}
/**
* The level of compute power that each node in the Big Data pool has.
*
*/
@Export(name="nodeSize", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> nodeSize;
/**
* @return The level of compute power that each node in the Big Data pool has.
*
*/
public Output<Optional<String>> nodeSize() {
return Codegen.optional(this.nodeSize);
}
/**
* The kind of nodes that the Big Data pool provides.
*
*/
@Export(name="nodeSizeFamily", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> nodeSizeFamily;
/**
* @return The kind of nodes that the Big Data pool provides.
*
*/
public Output<Optional<String>> nodeSizeFamily() {
return Codegen.optional(this.nodeSizeFamily);
}
/**
* The state of the Big Data pool.
*
*/
@Export(name="provisioningState", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> provisioningState;
/**
* @return The state of the Big Data pool.
*
*/
public Output<Optional<String>> provisioningState() {
return Codegen.optional(this.provisioningState);
}
/**
* Whether session level packages enabled.
*
*/
@Export(name="sessionLevelPackagesEnabled", refs={Boolean.class}, tree="[0]")
private Output</* @Nullable */ Boolean> sessionLevelPackagesEnabled;
/**
* @return Whether session level packages enabled.
*
*/
public Output<Optional<Boolean>> sessionLevelPackagesEnabled() {
return Codegen.optional(this.sessionLevelPackagesEnabled);
}
/**
* Spark configuration file to specify additional properties
*
*/
@Export(name="sparkConfigProperties", refs={SparkConfigPropertiesResponse.class}, tree="[0]")
private Output</* @Nullable */ SparkConfigPropertiesResponse> sparkConfigProperties;
/**
* @return Spark configuration file to specify additional properties
*
*/
public Output<Optional<SparkConfigPropertiesResponse>> sparkConfigProperties() {
return Codegen.optional(this.sparkConfigProperties);
}
/**
* The Spark events folder
*
*/
@Export(name="sparkEventsFolder", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> sparkEventsFolder;
/**
* @return The Spark events folder
*
*/
public Output<Optional<String>> sparkEventsFolder() {
return Codegen.optional(this.sparkEventsFolder);
}
/**
* The Apache Spark version.
*
*/
@Export(name="sparkVersion", refs={String.class}, tree="[0]")
private Output</* @Nullable */ String> sparkVersion;
/**
* @return The Apache Spark version.
*
*/
public Output<Optional<String>> sparkVersion() {
return Codegen.optional(this.sparkVersion);
}
/**
* Resource tags.
*
*/
@Export(name="tags", refs={Map.class,String.class}, tree="[0,1,1]")
private Output</* @Nullable */ Map<String,String>> tags;
/**
* @return Resource tags.
*
*/
public Output<Optional<Map<String,String>>> tags() {
return Codegen.optional(this.tags);
}
/**
* The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
*
*/
@Export(name="type", refs={String.class}, tree="[0]")
private Output<String> type;
/**
* @return The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
*
*/
public Output<String> type() {
return this.type;
}
/**
*
* @param name The _unique_ name of the resulting resource.
*/
public BigDataPool(java.lang.String name) {
this(name, BigDataPoolArgs.Empty);
}
/**
*
* @param name The _unique_ name of the resulting resource.
* @param args The arguments to use to populate this resource's properties.
*/
public BigDataPool(java.lang.String name, BigDataPoolArgs args) {
this(name, args, null);
}
/**
*
* @param name The _unique_ name of the resulting resource.
* @param args The arguments to use to populate this resource's properties.
* @param options A bag of options that control this resource's behavior.
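*
* A minimal sketch of passing options (the {@code protect} flag and argument values shown are
* illustrative, not required):
*
* <pre>
* {@code
* var pool = new BigDataPool("pool", BigDataPoolArgs.builder()
*         .resourceGroupName("ExampleResourceGroup")
*         .workspaceName("ExampleWorkspace")
*         .build(),
*     com.pulumi.resources.CustomResourceOptions.builder()
*         .protect(true)
*         .build());
* }
* </pre>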
*/
public BigDataPool(java.lang.String name, BigDataPoolArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) {
super("azure-native:synapse:BigDataPool", name, makeArgs(args, options), makeResourceOptions(options, Codegen.empty()), false);
}
private BigDataPool(java.lang.String name, Output<java.lang.String> id, @Nullable com.pulumi.resources.CustomResourceOptions options) {
super("azure-native:synapse:BigDataPool", name, null, makeResourceOptions(options, id), false);
}
private static BigDataPoolArgs makeArgs(BigDataPoolArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) {
if (options != null && options.getUrn().isPresent()) {
return null;
}
return args == null ? BigDataPoolArgs.Empty : args;
}
private static com.pulumi.resources.CustomResourceOptions makeResourceOptions(@Nullable com.pulumi.resources.CustomResourceOptions options, @Nullable Output<java.lang.String> id) {
var defaultOptions = com.pulumi.resources.CustomResourceOptions.builder()
.version(Utilities.getVersion())
.aliases(List.of(
Output.of(Alias.builder().type("azure-native:synapse/v20190601preview:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20201201:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20210301:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20210401preview:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20210501:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20210601:BigDataPool").build()),
Output.of(Alias.builder().type("azure-native:synapse/v20210601preview:BigDataPool").build())
))
.build();
return com.pulumi.resources.CustomResourceOptions.merge(defaultOptions, options, id);
}
/**
* Get an existing BigDataPool resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param options Optional settings to control the behavior of the CustomResource.
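*
* A minimal lookup sketch (the resource ID shown is the illustrative format from the import
* example above):
*
* <pre>
* {@code
* var existing = BigDataPool.get("existingPool",
*     Output.of("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName}"),
*     null);
* }
* </pre>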
*/
public static BigDataPool get(java.lang.String name, Output<java.lang.String> id, @Nullable com.pulumi.resources.CustomResourceOptions options) {
return new BigDataPool(name, id, options);
}
}