// Scraped-page artifact (kept as comment so the file remains valid Kotlin):
// All Downloads are FREE. Search and download functionalities are using the official Maven repository.
// com.pulumi.azure.datafactory.kotlin.DatasetParquetArgs.kt Maven / Gradle / Ivy

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.datafactory.kotlin

import com.pulumi.azure.datafactory.DatasetParquetArgs.builder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetAzureBlobFsLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetAzureBlobFsLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetAzureBlobStorageLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetAzureBlobStorageLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetHttpServerLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetHttpServerLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetSchemaColumnArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetParquetSchemaColumnArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Manages an Azure Parquet Dataset inside an Azure Data Factory.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example-resources",
 *     location: "West Europe",
 * });
 * const exampleFactory = new azure.datafactory.Factory("example", {
 *     name: "example",
 *     location: example.location,
 *     resourceGroupName: example.name,
 * });
 * const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("example", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     authenticationType: "Anonymous",
 *     url: "https://www.bing.com",
 * });
 * const exampleDatasetParquet = new azure.datafactory.DatasetParquet("example", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     linkedServiceName: exampleLinkedServiceWeb.name,
 *     httpServerLocation: {
 *         relativeUrl: "http://www.bing.com",
 *         path: "foo/bar/",
 *         filename: "fizz.txt",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example = azure.core.ResourceGroup("example",
 *     name="example-resources",
 *     location="West Europe")
 * example_factory = azure.datafactory.Factory("example",
 *     name="example",
 *     location=example.location,
 *     resource_group_name=example.name)
 * example_linked_service_web = azure.datafactory.LinkedServiceWeb("example",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     authentication_type="Anonymous",
 *     url="https://www.bing.com")
 * example_dataset_parquet = azure.datafactory.DatasetParquet("example",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     linked_service_name=example_linked_service_web.name,
 *     http_server_location={
 *         "relative_url": "http://www.bing.com",
 *         "path": "foo/bar/",
 *         "filename": "fizz.txt",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "example-resources",
 *         Location = "West Europe",
 *     });
 *     var exampleFactory = new Azure.DataFactory.Factory("example", new()
 *     {
 *         Name = "example",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *     });
 *     var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("example", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         AuthenticationType = "Anonymous",
 *         Url = "https://www.bing.com",
 *     });
 *     var exampleDatasetParquet = new Azure.DataFactory.DatasetParquet("example", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         LinkedServiceName = exampleLinkedServiceWeb.Name,
 *         HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
 *         {
 *             RelativeUrl = "http://www.bing.com",
 *             Path = "foo/bar/",
 *             Filename = "fizz.txt",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("example-resources"),
 * 			Location: pulumi.String("West Europe"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
 * 			Name:              pulumi.String("example"),
 * 			Location:          example.Location,
 * 			ResourceGroupName: example.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "example", &datafactory.LinkedServiceWebArgs{
 * 			Name:               pulumi.String("example"),
 * 			DataFactoryId:      exampleFactory.ID(),
 * 			AuthenticationType: pulumi.String("Anonymous"),
 * 			Url:                pulumi.String("https://www.bing.com"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewDatasetParquet(ctx, "example", &datafactory.DatasetParquetArgs{
 * 			Name:              pulumi.String("example"),
 * 			DataFactoryId:     exampleFactory.ID(),
 * 			LinkedServiceName: exampleLinkedServiceWeb.Name,
 * 			HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
 * 				RelativeUrl: pulumi.String("http://www.bing.com"),
 * 				Path:        pulumi.String("foo/bar/"),
 * 				Filename:    pulumi.String("fizz.txt"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.datafactory.LinkedServiceWeb;
 * import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
 * import com.pulumi.azure.datafactory.DatasetParquet;
 * import com.pulumi.azure.datafactory.DatasetParquetArgs;
 * import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example-resources")
 *             .location("West Europe")
 *             .build());
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("example")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .build());
 *         var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .authenticationType("Anonymous")
 *             .url("https://www.bing.com")
 *             .build());
 *         var exampleDatasetParquet = new DatasetParquet("exampleDatasetParquet", DatasetParquetArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .linkedServiceName(exampleLinkedServiceWeb.name())
 *             .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
 *                 .relativeUrl("http://www.bing.com")
 *                 .path("foo/bar/")
 *                 .filename("fizz.txt")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example-resources
 *       location: West Europe
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *   exampleLinkedServiceWeb:
 *     type: azure:datafactory:LinkedServiceWeb
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       authenticationType: Anonymous
 *       url: https://www.bing.com
 *   exampleDatasetParquet:
 *     type: azure:datafactory:DatasetParquet
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedServiceWeb.name}
 *       httpServerLocation:
 *         relativeUrl: http://www.bing.com
 *         path: foo/bar/
 *         filename: fizz.txt
 * ```
 * 
 * ## Import
 * Data Factory Datasets can be imported using the `resource id`, e.g.
 * ```sh
 * $ pulumi import azure:datafactory/datasetParquet:DatasetParquet example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
 * ```
 * @property additionalProperties A map of additional properties to associate with the Data Factory Dataset.
 * The following locations are supported for a Parquet Dataset:
 * @property annotations List of tags that can be used for describing the Data Factory Dataset.
 * @property azureBlobFsLocation An `azure_blob_fs_location` block as defined below.
 * @property azureBlobStorageLocation An `azure_blob_storage_location` block as defined below.
 * The following arguments are specific to a Parquet Dataset:
 * @property compressionCodec The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
 * @property compressionLevel Specifies the compression level. Possible values are `Optimal` and `Fastest`,
 * @property dataFactoryId The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.
 * @property description The description for the Data Factory Dataset.
 * @property folder The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
 * @property httpServerLocation A `http_server_location` block as defined below.
 * @property linkedServiceName The Data Factory Linked Service name in which to associate the Dataset with.
 * @property name Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
 * @property parameters A map of parameters to associate with the Data Factory Dataset.
 * @property schemaColumns A `schema_column` block as defined below.
 */
public data class DatasetParquetArgs(
    public val additionalProperties: Output<Map<String, String>>? = null,
    public val annotations: Output<List<String>>? = null,
    public val azureBlobFsLocation: Output<DatasetParquetAzureBlobFsLocationArgs>? = null,
    public val azureBlobStorageLocation: Output<DatasetParquetAzureBlobStorageLocationArgs>? = null,
    public val compressionCodec: Output<String>? = null,
    public val compressionLevel: Output<String>? = null,
    public val dataFactoryId: Output<String>? = null,
    public val description: Output<String>? = null,
    public val folder: Output<String>? = null,
    public val httpServerLocation: Output<DatasetParquetHttpServerLocationArgs>? = null,
    public val linkedServiceName: Output<String>? = null,
    public val name: Output<String>? = null,
    public val parameters: Output<Map<String, String>>? = null,
    public val schemaColumns: Output<List<DatasetParquetSchemaColumnArgs>>? = null,
) : ConvertibleToJava<com.pulumi.azure.datafactory.DatasetParquetArgs> {
    /**
     * Converts this Kotlin args wrapper into the underlying Java SDK
     * [com.pulumi.azure.datafactory.DatasetParquetArgs], mapping each set
     * property through and converting nested Kotlin args via their own `toJava()`.
     */
    override fun toJava(): com.pulumi.azure.datafactory.DatasetParquetArgs =
        com.pulumi.azure.datafactory.DatasetParquetArgs.builder()
            .additionalProperties(
                additionalProperties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .azureBlobFsLocation(
                azureBlobFsLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .azureBlobStorageLocation(
                azureBlobStorageLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .compressionCodec(compressionCodec?.applyValue({ args0 -> args0 }))
            .compressionLevel(compressionLevel?.applyValue({ args0 -> args0 }))
            .dataFactoryId(dataFactoryId?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .folder(folder?.applyValue({ args0 -> args0 }))
            .httpServerLocation(
                httpServerLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .linkedServiceName(linkedServiceName?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .parameters(
                parameters?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .schemaColumns(
                schemaColumns?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            ).build()
}

/**
 * Builder for [DatasetParquetArgs].
 */
@PulumiTagMarker
@PulumiTagMarker
public class DatasetParquetArgsBuilder internal constructor() {
    private var additionalProperties: Output<Map<String, String>>? = null

    private var annotations: Output<List<String>>? = null

    private var azureBlobFsLocation: Output<DatasetParquetAzureBlobFsLocationArgs>? = null

    private var azureBlobStorageLocation: Output<DatasetParquetAzureBlobStorageLocationArgs>? = null

    private var compressionCodec: Output<String>? = null

    private var compressionLevel: Output<String>? = null

    private var dataFactoryId: Output<String>? = null

    private var description: Output<String>? = null

    private var folder: Output<String>? = null

    private var httpServerLocation: Output<DatasetParquetHttpServerLocationArgs>? = null

    private var linkedServiceName: Output<String>? = null

    private var name: Output<String>? = null

    private var parameters: Output<Map<String, String>>? = null

    private var schemaColumns: Output<List<DatasetParquetSchemaColumnArgs>>? = null

    /**
     * @param value A map of additional properties to associate with the Data Factory Dataset.
     * The following locations are supported for a Parquet Dataset:
     */
    @JvmName("iueiprrivsxxkwvy")
    public suspend fun additionalProperties(`value`: Output<Map<String, String>>) {
        this.additionalProperties = value
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("tjvhooqdfjixkpme")
    public suspend fun annotations(`value`: Output<List<String>>) {
        this.annotations = value
    }

    @JvmName("rydhisgmnkxspxyh")
    public suspend fun annotations(vararg values: Output<String>) {
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("awxjqhebgriscjgl")
    public suspend fun annotations(values: List<Output<String>>) {
        this.annotations = Output.all(values)
    }

    /**
     * @param value An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("kqrimfqnmftehwjb")
    public suspend fun azureBlobFsLocation(`value`: Output<DatasetParquetAzureBlobFsLocationArgs>) {
        this.azureBlobFsLocation = value
    }

    /**
     * @param value An `azure_blob_storage_location` block as defined below.
     * The following arguments are specific to a Parquet Dataset:
     */
    @JvmName("osxhaufrhrgushxa")
    public suspend fun azureBlobStorageLocation(`value`: Output<DatasetParquetAzureBlobStorageLocationArgs>) {
        this.azureBlobStorageLocation = value
    }

    /**
     * @param value The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     */
    @JvmName("louhpicfwdhyantx")
    public suspend fun compressionCodec(`value`: Output<String>) {
        this.compressionCodec = value
    }

    /**
     * @param value Specifies the compression level. Possible values are `Optimal` and `Fastest`,
     */
    @JvmName("kocpfxkuvtlkoqrw")
    public suspend fun compressionLevel(`value`: Output<String>) {
        this.compressionLevel = value
    }

    /**
     * @param value The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.
     */
    @JvmName("iubpsbdjbrsegtwg")
    public suspend fun dataFactoryId(`value`: Output<String>) {
        this.dataFactoryId = value
    }

    /**
     * @param value The description for the Data Factory Dataset.
     */
    @JvmName("xtoeafdbrqixcoyq")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     */
    @JvmName("xxeilssjoavmkahu")
    public suspend fun folder(`value`: Output<String>) {
        this.folder = value
    }

    /**
     * @param value A `http_server_location` block as defined below.
     */
    @JvmName("roohkoboncahhjpn")
    public suspend fun httpServerLocation(`value`: Output<DatasetParquetHttpServerLocationArgs>) {
        this.httpServerLocation = value
    }

    /**
     * @param value The Data Factory Linked Service name in which to associate the Dataset with.
     */
    @JvmName("rjxemhobeopopicn")
    public suspend fun linkedServiceName(`value`: Output<String>) {
        this.linkedServiceName = value
    }

    /**
     * @param value Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("otbwilttmrppfono")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("qlctrmhoefltgsik")
    public suspend fun parameters(`value`: Output<Map<String, String>>) {
        this.parameters = value
    }

    /**
     * @param value A `schema_column` block as defined below.
     */
    @JvmName("ocpmiblsjyecvuul")
    public suspend fun schemaColumns(`value`: Output<List<DatasetParquetSchemaColumnArgs>>) {
        this.schemaColumns = value
    }

    @JvmName("tdbrfvfggtxcpvml")
    public suspend fun schemaColumns(vararg values: Output<DatasetParquetSchemaColumnArgs>) {
        this.schemaColumns = Output.all(values.asList())
    }

    /**
     * @param values A `schema_column` block as defined below.
     */
    @JvmName("njxntvsuedbqftal")
    public suspend fun schemaColumns(values: List<Output<DatasetParquetSchemaColumnArgs>>) {
        this.schemaColumns = Output.all(values)
    }

    /**
     * @param value A map of additional properties to associate with the Data Factory Dataset.
     * The following locations are supported for a Parquet Dataset:
     */
    @JvmName("htonudekojgqyvoc")
    public suspend fun additionalProperties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param values A map of additional properties to associate with the Data Factory Dataset.
     * The following locations are supported for a Parquet Dataset:
     */
    @JvmName("youqphbooappuqdc")
    public fun additionalProperties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("lujmclauccfbtrvf")
    public suspend fun annotations(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("xyvslknvxnvelfli")
    public suspend fun annotations(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("yxjejtpdaxooqggr")
    public suspend fun azureBlobFsLocation(`value`: DatasetParquetAzureBlobFsLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.azureBlobFsLocation = mapped
    }

    /**
     * @param argument An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("dtnvnahsvjicpmli")
    public suspend fun azureBlobFsLocation(argument: suspend DatasetParquetAzureBlobFsLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetParquetAzureBlobFsLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.azureBlobFsLocation = mapped
    }

    /**
     * @param value An `azure_blob_storage_location` block as defined below.
     * The following arguments are specific to a Parquet Dataset:
     */
    @JvmName("ocupbbptmsunqlio")
    public suspend fun azureBlobStorageLocation(`value`: DatasetParquetAzureBlobStorageLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.azureBlobStorageLocation = mapped
    }

    /**
     * @param argument An `azure_blob_storage_location` block as defined below.
     * The following arguments are specific to a Parquet Dataset:
     */
    @JvmName("oothrnkifqewmctr")
    public suspend fun azureBlobStorageLocation(argument: suspend DatasetParquetAzureBlobStorageLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetParquetAzureBlobStorageLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.azureBlobStorageLocation = mapped
    }

    /**
     * @param value The compression codec used to read/write text files. Valid values are `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy`, or `lz4`. Please note these values are case-sensitive.
     */
    @JvmName("vgrchecbklmrvdpi")
    public suspend fun compressionCodec(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.compressionCodec = mapped
    }

    /**
     * @param value Specifies the compression level. Possible values are `Optimal` and `Fastest`,
     */
    @JvmName("dnblgubtbclximlq")
    public suspend fun compressionLevel(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.compressionLevel = mapped
    }

    /**
     * @param value The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.
     */
    @JvmName("gclcoeftleplkfko")
    public suspend fun dataFactoryId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataFactoryId = mapped
    }

    /**
     * @param value The description for the Data Factory Dataset.
     */
    @JvmName("klfxfoefasahrlsi")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     */
    @JvmName("cpiurhvaddjqqlda")
    public suspend fun folder(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.folder = mapped
    }

    /**
     * @param value A `http_server_location` block as defined below.
     */
    @JvmName("kbgpdduiwyxlgrku")
    public suspend fun httpServerLocation(`value`: DatasetParquetHttpServerLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.httpServerLocation = mapped
    }

    /**
     * @param argument A `http_server_location` block as defined below.
     */
    @JvmName("xyeuyoeypcgsbxmn")
    public suspend fun httpServerLocation(argument: suspend DatasetParquetHttpServerLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetParquetHttpServerLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.httpServerLocation = mapped
    }

    /**
     * @param value The Data Factory Linked Service name in which to associate the Dataset with.
     */
    @JvmName("jnnrnanylijtkhqd")
    public suspend fun linkedServiceName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.linkedServiceName = mapped
    }

    /**
     * @param value Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("oyyekebmdmwhponb")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("ismosuijxbdylxtf")
    public suspend fun parameters(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param values A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("kagjnyapnstewljm")
    public fun parameters(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param value A `schema_column` block as defined below.
     */
    @JvmName("afxwrjtmvbbychdu")
    public suspend fun schemaColumns(`value`: List<DatasetParquetSchemaColumnArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("wthatiytptdnjncr")
    public suspend fun schemaColumns(argument: List<suspend DatasetParquetSchemaColumnArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            DatasetParquetSchemaColumnArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("gynscwdgqhsrwubv")
    public suspend fun schemaColumns(vararg argument: suspend DatasetParquetSchemaColumnArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            DatasetParquetSchemaColumnArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("rdnvobiyndddlvvg")
    public suspend fun schemaColumns(argument: suspend DatasetParquetSchemaColumnArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(
            DatasetParquetSchemaColumnArgsBuilder().applySuspend {
                argument()
            }.build(),
        )
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param values A `schema_column` block as defined below.
     */
    @JvmName("ijlipvfvelbhimen")
    public suspend fun schemaColumns(vararg values: DatasetParquetSchemaColumnArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.schemaColumns = mapped
    }

    /**
     * Assembles the configured values into an immutable [DatasetParquetArgs].
     */
    internal fun build(): DatasetParquetArgs = DatasetParquetArgs(
        additionalProperties = additionalProperties,
        annotations = annotations,
        azureBlobFsLocation = azureBlobFsLocation,
        azureBlobStorageLocation = azureBlobStorageLocation,
        compressionCodec = compressionCodec,
        compressionLevel = compressionLevel,
        dataFactoryId = dataFactoryId,
        description = description,
        folder = folder,
        httpServerLocation = httpServerLocation,
        linkedServiceName = linkedServiceName,
        name = name,
        parameters = parameters,
        schemaColumns = schemaColumns,
    )
}




// © 2015 - 2024 Weber Informatics LLC | Privacy Policy (scraped-page footer, kept as comment)