
com.pulumi.azure.datafactory.DatasetParquet Maven / Gradle / Ivy


A Pulumi package for creating and managing Microsoft Azure cloud resources, based on the Terraform azurerm provider. We recommend using the [Azure Native provider](https://github.com/pulumi/pulumi-azure-native) to provision Azure infrastructure. Azure Native provides complete coverage of Azure resources and same-day access to new resources and resource updates.

There is a newer version: 6.10.0-alpha.1731737215
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azure.datafactory;

import com.pulumi.azure.Utilities;
import com.pulumi.azure.datafactory.DatasetParquetArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetState;
import com.pulumi.azure.datafactory.outputs.DatasetParquetAzureBlobFsLocation;
import com.pulumi.azure.datafactory.outputs.DatasetParquetAzureBlobStorageLocation;
import com.pulumi.azure.datafactory.outputs.DatasetParquetHttpServerLocation;
import com.pulumi.azure.datafactory.outputs.DatasetParquetSchemaColumn;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Export;
import com.pulumi.core.annotations.ResourceType;
import com.pulumi.core.internal.Codegen;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;

/**
 * Manages an Azure Parquet Dataset inside an Azure Data Factory.
 * 
 * ## Example Usage
 * 
 * <!--Start PulumiCodeChooser -->
 * <pre>
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.datafactory.LinkedServiceWeb;
 * import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
 * import com.pulumi.azure.datafactory.DatasetParquet;
 * import com.pulumi.azure.datafactory.DatasetParquetArgs;
 * import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example-resources")
 *             .location("West Europe")
 *             .build());
 * 
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("example")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .build());
 * 
 *         var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .authenticationType("Anonymous")
 *             .url("https://www.bing.com")
 *             .build());
 * 
 *         var exampleDatasetParquet = new DatasetParquet("exampleDatasetParquet", DatasetParquetArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .linkedServiceName(exampleLinkedServiceWeb.name())
 *             .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
 *                 .relativeUrl("http://www.bing.com")
 *                 .path("foo/bar/")
 *                 .filename("fizz.txt")
 *                 .build())
 *             .build());
 * 
 *     }
 * }
 * }
 * </pre>
 * <!--End PulumiCodeChooser -->
 * 
 * ## Import
 * 
 * Data Factory Datasets can be imported using the `resource id`, e.g.
 * 
 * ```sh
 * $ pulumi import azure:datafactory/datasetParquet:DatasetParquet example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
 * ```
 * 
 */
@ResourceType(type="azure:datafactory/datasetParquet:DatasetParquet")
public class DatasetParquet extends com.pulumi.resources.CustomResource {
    /**
     * A map of additional properties to associate with the Data Factory Dataset.
     *
     * The following locations are supported for a Parquet Dataset:
     *
     */
    @Export(name="additionalProperties", refs={Map.class,String.class}, tree="[0,1,1]")
    private Output</* @Nullable */ Map<String,String>> additionalProperties;

    /**
     * @return A map of additional properties to associate with the Data Factory Dataset.
     *
     * The following locations are supported for a Parquet Dataset:
     *
     */
    public Output<Optional<Map<String,String>>> additionalProperties() {
        return Codegen.optional(this.additionalProperties);
    }
    /**
     * List of tags that can be used for describing the Data Factory Dataset.
     *
     */
    @Export(name="annotations", refs={List.class,String.class}, tree="[0,1]")
    private Output</* @Nullable */ List<String>> annotations;

    /**
     * @return List of tags that can be used for describing the Data Factory Dataset.
     *
     */
    public Output<Optional<List<String>>> annotations() {
        return Codegen.optional(this.annotations);
    }
    /**
     * An `azure_blob_fs_location` block as defined below.
     *
     */
    @Export(name="azureBlobFsLocation", refs={DatasetParquetAzureBlobFsLocation.class}, tree="[0]")
    private Output</* @Nullable */ DatasetParquetAzureBlobFsLocation> azureBlobFsLocation;

    /**
     * @return An `azure_blob_fs_location` block as defined below.
     *
     */
    public Output<Optional<DatasetParquetAzureBlobFsLocation>> azureBlobFsLocation() {
        return Codegen.optional(this.azureBlobFsLocation);
    }
    /**
     * An `azure_blob_storage_location` block as defined below.
     *
     * The following arguments are specific to the Parquet Dataset:
     *
     */
    @Export(name="azureBlobStorageLocation", refs={DatasetParquetAzureBlobStorageLocation.class}, tree="[0]")
    private Output</* @Nullable */ DatasetParquetAzureBlobStorageLocation> azureBlobStorageLocation;

    /**
     * @return An `azure_blob_storage_location` block as defined below.
     *
     * The following arguments are specific to the Parquet Dataset:
     *
     */
    public Output<Optional<DatasetParquetAzureBlobStorageLocation>> azureBlobStorageLocation() {
        return Codegen.optional(this.azureBlobStorageLocation);
    }
    /**
     * The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     *
     */
    @Export(name="compressionCodec", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> compressionCodec;

    /**
     * @return The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     *
     */
    public Output<Optional<String>> compressionCodec() {
        return Codegen.optional(this.compressionCodec);
    }
    /**
     * Specifies the compression level. Possible values are `Optimal` and `Fastest`.
     *
     */
    @Export(name="compressionLevel", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> compressionLevel;

    /**
     * @return Specifies the compression level. Possible values are `Optimal` and `Fastest`.
     *
     */
    public Output<Optional<String>> compressionLevel() {
        return Codegen.optional(this.compressionLevel);
    }
    /**
     * The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
     *
     */
    @Export(name="dataFactoryId", refs={String.class}, tree="[0]")
    private Output<String> dataFactoryId;

    /**
     * @return The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
     *
     */
    public Output<String> dataFactoryId() {
        return this.dataFactoryId;
    }
    /**
     * The description for the Data Factory Dataset.
     *
     */
    @Export(name="description", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> description;

    /**
     * @return The description for the Data Factory Dataset.
     *
     */
    public Output<Optional<String>> description() {
        return Codegen.optional(this.description);
    }
    /**
     * The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     *
     */
    @Export(name="folder", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> folder;

    /**
     * @return The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     *
     */
    public Output<Optional<String>> folder() {
        return Codegen.optional(this.folder);
    }
    /**
     * A `http_server_location` block as defined below.
     *
     */
    @Export(name="httpServerLocation", refs={DatasetParquetHttpServerLocation.class}, tree="[0]")
    private Output</* @Nullable */ DatasetParquetHttpServerLocation> httpServerLocation;

    /**
     * @return A `http_server_location` block as defined below.
     *
     */
    public Output<Optional<DatasetParquetHttpServerLocation>> httpServerLocation() {
        return Codegen.optional(this.httpServerLocation);
    }
    /**
     * The Data Factory Linked Service name with which to associate the Dataset.
     *
     */
    @Export(name="linkedServiceName", refs={String.class}, tree="[0]")
    private Output<String> linkedServiceName;

    /**
     * @return The Data Factory Linked Service name with which to associate the Dataset.
     *
     */
    public Output<String> linkedServiceName() {
        return this.linkedServiceName;
    }
    /**
     * Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     *
     */
    @Export(name="name", refs={String.class}, tree="[0]")
    private Output<String> name;

    /**
     * @return Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     *
     */
    public Output<String> name() {
        return this.name;
    }
    /**
     * A map of parameters to associate with the Data Factory Dataset.
     *
     */
    @Export(name="parameters", refs={Map.class,String.class}, tree="[0,1,1]")
    private Output</* @Nullable */ Map<String,String>> parameters;

    /**
     * @return A map of parameters to associate with the Data Factory Dataset.
     *
     */
    public Output<Optional<Map<String,String>>> parameters() {
        return Codegen.optional(this.parameters);
    }
    /**
     * A `schema_column` block as defined below.
     *
     */
    @Export(name="schemaColumns", refs={List.class,DatasetParquetSchemaColumn.class}, tree="[0,1]")
    private Output</* @Nullable */ List<DatasetParquetSchemaColumn>> schemaColumns;

    /**
     * @return A `schema_column` block as defined below.
     *
     */
    public Output<Optional<List<DatasetParquetSchemaColumn>>> schemaColumns() {
        return Codegen.optional(this.schemaColumns);
    }

    /**
     *
     * @param name The _unique_ name of the resulting resource.
     */
    public DatasetParquet(java.lang.String name) {
        this(name, DatasetParquetArgs.Empty);
    }
    /**
     *
     * @param name The _unique_ name of the resulting resource.
     * @param args The arguments to use to populate this resource's properties.
     */
    public DatasetParquet(java.lang.String name, DatasetParquetArgs args) {
        this(name, args, null);
    }
    /**
     *
     * @param name The _unique_ name of the resulting resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param options A bag of options that control this resource's behavior.
     */
    public DatasetParquet(java.lang.String name, DatasetParquetArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) {
        super("azure:datafactory/datasetParquet:DatasetParquet", name, makeArgs(args, options), makeResourceOptions(options, Codegen.empty()), false);
    }

    private DatasetParquet(java.lang.String name, Output<java.lang.String> id, @Nullable DatasetParquetState state, @Nullable com.pulumi.resources.CustomResourceOptions options) {
        super("azure:datafactory/datasetParquet:DatasetParquet", name, state, makeResourceOptions(options, id), false);
    }

    private static DatasetParquetArgs makeArgs(DatasetParquetArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) {
        if (options != null && options.getUrn().isPresent()) {
            return null;
        }
        return args == null ? DatasetParquetArgs.Empty : args;
    }

    private static com.pulumi.resources.CustomResourceOptions makeResourceOptions(@Nullable com.pulumi.resources.CustomResourceOptions options, @Nullable Output<java.lang.String> id) {
        var defaultOptions = com.pulumi.resources.CustomResourceOptions.builder()
            .version(Utilities.getVersion())
            .build();
        return com.pulumi.resources.CustomResourceOptions.merge(defaultOptions, options, id);
    }

    /**
     * Get an existing DatasetParquet resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state
     * @param options Optional settings to control the behavior of the CustomResource.
     */
    public static DatasetParquet get(java.lang.String name, Output<java.lang.String> id, @Nullable DatasetParquetState state, @Nullable com.pulumi.resources.CustomResourceOptions options) {
        return new DatasetParquet(name, id, state, options);
    }
}
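The class above also exposes a static `get` method for adopting an already-provisioned Parquet Dataset into a program without creating it. Below is a minimal sketch of that lookup; the program class name, the export name, and the subscription-scoped resource ID are illustrative placeholders (the ID format follows the Import example in the Javadoc above).

```java
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.datafactory.DatasetParquet;

public class LookupApp {
    public static void main(String[] args) {
        Pulumi.run(LookupApp::stack);
    }

    public static void stack(Context ctx) {
        // Look up an existing Parquet Dataset by its Azure resource ID (placeholder shown;
        // substitute the real dataset ID from your subscription).
        var existing = DatasetParquet.get(
            "existing",
            Output.of("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example"),
            null,
            null);

        // Export a property of the looked-up dataset for inspection.
        ctx.export("linkedServiceName", existing.linkedServiceName());
    }
}
```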



