// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azure.datafactory;

import com.pulumi.azure.datafactory.inputs.DatasetParquetAzureBlobFsLocationArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetAzureBlobStorageLocationArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetSchemaColumnArgs;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;


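/**
 * Arguments for constructing a Data Factory Parquet Dataset.
 *
 * <p>A minimal usage sketch, assuming the {@code DatasetParquet} resource class from this package
 * and illustrative variables ({@code factory}, {@code linkedService}) that are not defined here:</p>
 *
 * <pre>{@code
 * var dataset = new DatasetParquet("example", DatasetParquetArgs.builder()
 *     .dataFactoryId(factory.id())               // required
 *     .linkedServiceName(linkedService.name())   // required
 *     .compressionCodec("gzip")
 *     .build());
 * }</pre>
 */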
public final class DatasetParquetArgs extends com.pulumi.resources.ResourceArgs {

    public static final DatasetParquetArgs Empty = new DatasetParquetArgs();

    /**
     * A map of additional properties to associate with the Data Factory Dataset.
     * 
     * The following locations are supported for a Parquet Dataset:
     * 
     */
    @Import(name="additionalProperties")
    private @Nullable Output<Map<String,String>> additionalProperties;

    /**
     * @return A map of additional properties to associate with the Data Factory Dataset.
     * 
     * The following locations are supported for a Parquet Dataset:
     * 
     */
    public Optional<Output<Map<String,String>>> additionalProperties() {
        return Optional.ofNullable(this.additionalProperties);
    }

    /**
     * List of tags that can be used for describing the Data Factory Dataset.
     * 
     */
    @Import(name="annotations")
    private @Nullable Output<List<String>> annotations;

    /**
     * @return List of tags that can be used for describing the Data Factory Dataset.
     * 
     */
    public Optional<Output<List<String>>> annotations() {
        return Optional.ofNullable(this.annotations);
    }

    /**
     * An `azure_blob_fs_location` block as defined below.
     * 
     */
    @Import(name="azureBlobFsLocation")
    private @Nullable Output<DatasetParquetAzureBlobFsLocationArgs> azureBlobFsLocation;

    /**
     * @return An `azure_blob_fs_location` block as defined below.
     * 
     */
    public Optional<Output<DatasetParquetAzureBlobFsLocationArgs>> azureBlobFsLocation() {
        return Optional.ofNullable(this.azureBlobFsLocation);
    }

    /**
     * An `azure_blob_storage_location` block as defined below.
     * 
     * The following arguments are specific to the Parquet Dataset:
     * 
     */
    @Import(name="azureBlobStorageLocation")
    private @Nullable Output<DatasetParquetAzureBlobStorageLocationArgs> azureBlobStorageLocation;

    /**
     * @return An `azure_blob_storage_location` block as defined below.
     * 
     * The following arguments are specific to the Parquet Dataset:
     * 
     */
    public Optional<Output<DatasetParquetAzureBlobStorageLocationArgs>> azureBlobStorageLocation() {
        return Optional.ofNullable(this.azureBlobStorageLocation);
    }

    /**
     * The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     * 
     */
    @Import(name="compressionCodec")
    private @Nullable Output<String> compressionCodec;

    /**
     * @return The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     * 
     */
    public Optional<Output<String>> compressionCodec() {
        return Optional.ofNullable(this.compressionCodec);
    }

    /**
     * Specifies the compression level. Possible values are `Optimal` and `Fastest`.
     * 
     */
    @Import(name="compressionLevel")
    private @Nullable Output<String> compressionLevel;

    /**
     * @return Specifies the compression level. Possible values are `Optimal` and `Fastest`.
     * 
     */
    public Optional<Output<String>> compressionLevel() {
        return Optional.ofNullable(this.compressionLevel);
    }

    /**
     * The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource.
     * 
     */
    @Import(name="dataFactoryId", required=true)
    private Output<String> dataFactoryId;

    /**
     * @return The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource.
     * 
     */
    public Output<String> dataFactoryId() {
        return this.dataFactoryId;
    }

    /**
     * The description for the Data Factory Dataset.
     * 
     */
    @Import(name="description")
    private @Nullable Output<String> description;

    /**
     * @return The description for the Data Factory Dataset.
     * 
     */
    public Optional<Output<String>> description() {
        return Optional.ofNullable(this.description);
    }

    /**
     * The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     * 
     */
    @Import(name="folder")
    private @Nullable Output<String> folder;

    /**
     * @return The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     * 
     */
    public Optional<Output<String>> folder() {
        return Optional.ofNullable(this.folder);
    }

    /**
     * An `http_server_location` block as defined below.
     * 
     */
    @Import(name="httpServerLocation")
    private @Nullable Output<DatasetParquetHttpServerLocationArgs> httpServerLocation;

    /**
     * @return An `http_server_location` block as defined below.
     * 
     */
    public Optional<Output<DatasetParquetHttpServerLocationArgs>> httpServerLocation() {
        return Optional.ofNullable(this.httpServerLocation);
    }

    /**
     * The name of the Data Factory Linked Service with which to associate the Dataset.
     * 
     */
    @Import(name="linkedServiceName", required=true)
    private Output<String> linkedServiceName;

    /**
     * @return The name of the Data Factory Linked Service with which to associate the Dataset.
     * 
     */
    public Output<String> linkedServiceName() {
        return this.linkedServiceName;
    }

    /**
     * Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     * 
     */
    @Import(name="name")
    private @Nullable Output<String> name;

    /**
     * @return Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     * 
     */
    public Optional<Output<String>> name() {
        return Optional.ofNullable(this.name);
    }

    /**
     * A map of parameters to associate with the Data Factory Dataset.
     * 
     */
    @Import(name="parameters")
    private @Nullable Output<Map<String,String>> parameters;

    /**
     * @return A map of parameters to associate with the Data Factory Dataset.
     * 
     */
    public Optional<Output<Map<String,String>>> parameters() {
        return Optional.ofNullable(this.parameters);
    }

    /**
     * A `schema_column` block as defined below.
     * 
     */
    @Import(name="schemaColumns")
    private @Nullable Output<List<DatasetParquetSchemaColumnArgs>> schemaColumns;

    /**
     * @return A `schema_column` block as defined below.
     * 
     */
    public Optional<Output<List<DatasetParquetSchemaColumnArgs>>> schemaColumns() {
        return Optional.ofNullable(this.schemaColumns);
    }

    private DatasetParquetArgs() {}

    private DatasetParquetArgs(DatasetParquetArgs $) {
        this.additionalProperties = $.additionalProperties;
        this.annotations = $.annotations;
        this.azureBlobFsLocation = $.azureBlobFsLocation;
        this.azureBlobStorageLocation = $.azureBlobStorageLocation;
        this.compressionCodec = $.compressionCodec;
        this.compressionLevel = $.compressionLevel;
        this.dataFactoryId = $.dataFactoryId;
        this.description = $.description;
        this.folder = $.folder;
        this.httpServerLocation = $.httpServerLocation;
        this.linkedServiceName = $.linkedServiceName;
        this.name = $.name;
        this.parameters = $.parameters;
        this.schemaColumns = $.schemaColumns;
    }

    public static Builder builder() {
        return new Builder();
    }
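    /**
     * Creates a builder pre-populated from an existing {@code DatasetParquetArgs}, which is handy
     * for deriving a variant without restating every property. A hedged sketch (variable names
     * are illustrative, not part of this API):
     *
     * <pre>{@code
     * DatasetParquetArgs snappyVariant = DatasetParquetArgs.builder(baseArgs)
     *     .compressionCodec("snappy")
     *     .build();
     * }</pre>
     */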
    public static Builder builder(DatasetParquetArgs defaults) {
        return new Builder(defaults);
    }

    public static final class Builder {
        private DatasetParquetArgs $;

        public Builder() {
            $ = new DatasetParquetArgs();
        }

        public Builder(DatasetParquetArgs defaults) {
            $ = new DatasetParquetArgs(Objects.requireNonNull(defaults));
        }

        /**
         * @param additionalProperties A map of additional properties to associate with the Data Factory Dataset.
         * 
         * The following locations are supported for a Parquet Dataset:
         * 
         * @return builder
         * 
         */
        public Builder additionalProperties(@Nullable Output<Map<String,String>> additionalProperties) {
            $.additionalProperties = additionalProperties;
            return this;
        }

        /**
         * @param additionalProperties A map of additional properties to associate with the Data Factory Dataset.
         * 
         * The following locations are supported for a Parquet Dataset:
         * 
         * @return builder
         * 
         */
        public Builder additionalProperties(Map<String,String> additionalProperties) {
            return additionalProperties(Output.of(additionalProperties));
        }

        /**
         * @param annotations List of tags that can be used for describing the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder annotations(@Nullable Output<List<String>> annotations) {
            $.annotations = annotations;
            return this;
        }

        /**
         * @param annotations List of tags that can be used for describing the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder annotations(List<String> annotations) {
            return annotations(Output.of(annotations));
        }

        /**
         * @param annotations List of tags that can be used for describing the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder annotations(String... annotations) {
            return annotations(List.of(annotations));
        }

        /**
         * @param azureBlobFsLocation An `azure_blob_fs_location` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder azureBlobFsLocation(@Nullable Output<DatasetParquetAzureBlobFsLocationArgs> azureBlobFsLocation) {
            $.azureBlobFsLocation = azureBlobFsLocation;
            return this;
        }

        /**
         * @param azureBlobFsLocation An `azure_blob_fs_location` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder azureBlobFsLocation(DatasetParquetAzureBlobFsLocationArgs azureBlobFsLocation) {
            return azureBlobFsLocation(Output.of(azureBlobFsLocation));
        }

        /**
         * @param azureBlobStorageLocation An `azure_blob_storage_location` block as defined below.
         * 
         * The following arguments are specific to the Parquet Dataset:
         * 
         * @return builder
         * 
         */
        public Builder azureBlobStorageLocation(@Nullable Output<DatasetParquetAzureBlobStorageLocationArgs> azureBlobStorageLocation) {
            $.azureBlobStorageLocation = azureBlobStorageLocation;
            return this;
        }

        /**
         * @param azureBlobStorageLocation An `azure_blob_storage_location` block as defined below.
         * 
         * The following arguments are specific to the Parquet Dataset:
         * 
         * @return builder
         * 
         */
        public Builder azureBlobStorageLocation(DatasetParquetAzureBlobStorageLocationArgs azureBlobStorageLocation) {
            return azureBlobStorageLocation(Output.of(azureBlobStorageLocation));
        }

        /**
         * @param compressionCodec The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
         * 
         * @return builder
         * 
         */
        public Builder compressionCodec(@Nullable Output<String> compressionCodec) {
            $.compressionCodec = compressionCodec;
            return this;
        }

        /**
         * @param compressionCodec The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
         * 
         * @return builder
         * 
         */
        public Builder compressionCodec(String compressionCodec) {
            return compressionCodec(Output.of(compressionCodec));
        }

        /**
         * @param compressionLevel Specifies the compression level. Possible values are `Optimal` and `Fastest`.
         * 
         * @return builder
         * 
         */
        public Builder compressionLevel(@Nullable Output<String> compressionLevel) {
            $.compressionLevel = compressionLevel;
            return this;
        }

        /**
         * @param compressionLevel Specifies the compression level. Possible values are `Optimal` and `Fastest`.
         * 
         * @return builder
         * 
         */
        public Builder compressionLevel(String compressionLevel) {
            return compressionLevel(Output.of(compressionLevel));
        }

        /**
         * @param dataFactoryId The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource.
         * 
         * @return builder
         * 
         */
        public Builder dataFactoryId(Output<String> dataFactoryId) {
            $.dataFactoryId = dataFactoryId;
            return this;
        }

        /**
         * @param dataFactoryId The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource.
         * 
         * @return builder
         * 
         */
        public Builder dataFactoryId(String dataFactoryId) {
            return dataFactoryId(Output.of(dataFactoryId));
        }

        /**
         * @param description The description for the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder description(@Nullable Output<String> description) {
            $.description = description;
            return this;
        }

        /**
         * @param description The description for the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder description(String description) {
            return description(Output.of(description));
        }

        /**
         * @param folder The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
         * 
         * @return builder
         * 
         */
        public Builder folder(@Nullable Output<String> folder) {
            $.folder = folder;
            return this;
        }

        /**
         * @param folder The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
         * 
         * @return builder
         * 
         */
        public Builder folder(String folder) {
            return folder(Output.of(folder));
        }

        /**
         * @param httpServerLocation An `http_server_location` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder httpServerLocation(@Nullable Output<DatasetParquetHttpServerLocationArgs> httpServerLocation) {
            $.httpServerLocation = httpServerLocation;
            return this;
        }

        /**
         * @param httpServerLocation An `http_server_location` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder httpServerLocation(DatasetParquetHttpServerLocationArgs httpServerLocation) {
            return httpServerLocation(Output.of(httpServerLocation));
        }

        /**
         * @param linkedServiceName The name of the Data Factory Linked Service with which to associate the Dataset.
         * 
         * @return builder
         * 
         */
        public Builder linkedServiceName(Output<String> linkedServiceName) {
            $.linkedServiceName = linkedServiceName;
            return this;
        }

        /**
         * @param linkedServiceName The name of the Data Factory Linked Service with which to associate the Dataset.
         * 
         * @return builder
         * 
         */
        public Builder linkedServiceName(String linkedServiceName) {
            return linkedServiceName(Output.of(linkedServiceName));
        }

        /**
         * @param name Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
         * 
         * @return builder
         * 
         */
        public Builder name(@Nullable Output<String> name) {
            $.name = name;
            return this;
        }

        /**
         * @param name Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
         * 
         * @return builder
         * 
         */
        public Builder name(String name) {
            return name(Output.of(name));
        }

        /**
         * @param parameters A map of parameters to associate with the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder parameters(@Nullable Output<Map<String,String>> parameters) {
            $.parameters = parameters;
            return this;
        }

        /**
         * @param parameters A map of parameters to associate with the Data Factory Dataset.
         * 
         * @return builder
         * 
         */
        public Builder parameters(Map<String,String> parameters) {
            return parameters(Output.of(parameters));
        }

        /**
         * @param schemaColumns A `schema_column` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder schemaColumns(@Nullable Output<List<DatasetParquetSchemaColumnArgs>> schemaColumns) {
            $.schemaColumns = schemaColumns;
            return this;
        }

        /**
         * @param schemaColumns A `schema_column` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder schemaColumns(List<DatasetParquetSchemaColumnArgs> schemaColumns) {
            return schemaColumns(Output.of(schemaColumns));
        }

        /**
         * @param schemaColumns A `schema_column` block as defined below.
         * 
         * @return builder
         * 
         */
        public Builder schemaColumns(DatasetParquetSchemaColumnArgs... schemaColumns) {
            return schemaColumns(List.of(schemaColumns));
        }
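        // Hedged sketch of a schema_column entry. The property names on
        // DatasetParquetSchemaColumnArgs (name, type) are assumed from the provider's
        // schema_column block and are not defined in this file:
        //
        //   builder.schemaColumns(DatasetParquetSchemaColumnArgs.builder()
        //       .name("id")
        //       .type("Int32")
        //       .build());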

        public DatasetParquetArgs build() {
            if ($.dataFactoryId == null) {
                throw new MissingRequiredPropertyException("DatasetParquetArgs", "dataFactoryId");
            }
            if ($.linkedServiceName == null) {
                throw new MissingRequiredPropertyException("DatasetParquetArgs", "linkedServiceName");
            }
            return $;
        }
    }

}



