@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.datafactory.kotlin

import com.pulumi.azure.datafactory.DatasetDelimitedTextArgs.builder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextAzureBlobFsLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextAzureBlobFsLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextAzureBlobStorageLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextAzureBlobStorageLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextHttpServerLocationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextHttpServerLocationArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextSchemaColumnArgs
import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextSchemaColumnArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Boolean
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Manages an Azure Delimited Text Dataset inside an Azure Data Factory.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example-resources",
 *     location: "West Europe",
 * });
 * const exampleFactory = new azure.datafactory.Factory("example", {
 *     name: "example",
 *     location: example.location,
 *     resourceGroupName: example.name,
 * });
 * const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("example", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     authenticationType: "Anonymous",
 *     url: "https://www.bing.com",
 * });
 * const exampleDatasetDelimitedText = new azure.datafactory.DatasetDelimitedText("example", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     linkedServiceName: exampleLinkedServiceWeb.name,
 *     httpServerLocation: {
 *         relativeUrl: "http://www.bing.com",
 *         path: "foo/bar/",
 *         filename: "fizz.txt",
 *     },
 *     columnDelimiter: ",",
 *     rowDelimiter: "NEW",
 *     encoding: "UTF-8",
 *     quoteCharacter: "x",
 *     escapeCharacter: "f",
 *     firstRowAsHeader: true,
 *     nullValue: "NULL",
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example = azure.core.ResourceGroup("example",
 *     name="example-resources",
 *     location="West Europe")
 * example_factory = azure.datafactory.Factory("example",
 *     name="example",
 *     location=example.location,
 *     resource_group_name=example.name)
 * example_linked_service_web = azure.datafactory.LinkedServiceWeb("example",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     authentication_type="Anonymous",
 *     url="https://www.bing.com")
 * example_dataset_delimited_text = azure.datafactory.DatasetDelimitedText("example",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     linked_service_name=example_linked_service_web.name,
 *     http_server_location=azure.datafactory.DatasetDelimitedTextHttpServerLocationArgs(
 *         relative_url="http://www.bing.com",
 *         path="foo/bar/",
 *         filename="fizz.txt",
 *     ),
 *     column_delimiter=",",
 *     row_delimiter="NEW",
 *     encoding="UTF-8",
 *     quote_character="x",
 *     escape_character="f",
 *     first_row_as_header=True,
 *     null_value="NULL")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "example-resources",
 *         Location = "West Europe",
 *     });
 *     var exampleFactory = new Azure.DataFactory.Factory("example", new()
 *     {
 *         Name = "example",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *     });
 *     var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("example", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         AuthenticationType = "Anonymous",
 *         Url = "https://www.bing.com",
 *     });
 *     var exampleDatasetDelimitedText = new Azure.DataFactory.DatasetDelimitedText("example", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         LinkedServiceName = exampleLinkedServiceWeb.Name,
 *         HttpServerLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextHttpServerLocationArgs
 *         {
 *             RelativeUrl = "http://www.bing.com",
 *             Path = "foo/bar/",
 *             Filename = "fizz.txt",
 *         },
 *         ColumnDelimiter = ",",
 *         RowDelimiter = "NEW",
 *         Encoding = "UTF-8",
 *         QuoteCharacter = "x",
 *         EscapeCharacter = "f",
 *         FirstRowAsHeader = true,
 *         NullValue = "NULL",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("example-resources"),
 * 			Location: pulumi.String("West Europe"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
 * 			Name:              pulumi.String("example"),
 * 			Location:          example.Location,
 * 			ResourceGroupName: example.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "example", &datafactory.LinkedServiceWebArgs{
 * 			Name:               pulumi.String("example"),
 * 			DataFactoryId:      exampleFactory.ID(),
 * 			AuthenticationType: pulumi.String("Anonymous"),
 * 			Url:                pulumi.String("https://www.bing.com"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewDatasetDelimitedText(ctx, "example", &datafactory.DatasetDelimitedTextArgs{
 * 			Name:              pulumi.String("example"),
 * 			DataFactoryId:     exampleFactory.ID(),
 * 			LinkedServiceName: exampleLinkedServiceWeb.Name,
 * 			HttpServerLocation: &datafactory.DatasetDelimitedTextHttpServerLocationArgs{
 * 				RelativeUrl: pulumi.String("http://www.bing.com"),
 * 				Path:        pulumi.String("foo/bar/"),
 * 				Filename:    pulumi.String("fizz.txt"),
 * 			},
 * 			ColumnDelimiter:  pulumi.String(","),
 * 			RowDelimiter:     pulumi.String("NEW"),
 * 			Encoding:         pulumi.String("UTF-8"),
 * 			QuoteCharacter:   pulumi.String("x"),
 * 			EscapeCharacter:  pulumi.String("f"),
 * 			FirstRowAsHeader: pulumi.Bool(true),
 * 			NullValue:        pulumi.String("NULL"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.datafactory.LinkedServiceWeb;
 * import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
 * import com.pulumi.azure.datafactory.DatasetDelimitedText;
 * import com.pulumi.azure.datafactory.DatasetDelimitedTextArgs;
 * import com.pulumi.azure.datafactory.inputs.DatasetDelimitedTextHttpServerLocationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example-resources")
 *             .location("West Europe")
 *             .build());
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("example")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .build());
 *         var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .authenticationType("Anonymous")
 *             .url("https://www.bing.com")
 *             .build());
 *         var exampleDatasetDelimitedText = new DatasetDelimitedText("exampleDatasetDelimitedText", DatasetDelimitedTextArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .linkedServiceName(exampleLinkedServiceWeb.name())
 *             .httpServerLocation(DatasetDelimitedTextHttpServerLocationArgs.builder()
 *                 .relativeUrl("http://www.bing.com")
 *                 .path("foo/bar/")
 *                 .filename("fizz.txt")
 *                 .build())
 *             .columnDelimiter(",")
 *             .rowDelimiter("NEW")
 *             .encoding("UTF-8")
 *             .quoteCharacter("x")
 *             .escapeCharacter("f")
 *             .firstRowAsHeader(true)
 *             .nullValue("NULL")
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example-resources
 *       location: West Europe
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *   exampleLinkedServiceWeb:
 *     type: azure:datafactory:LinkedServiceWeb
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       authenticationType: Anonymous
 *       url: https://www.bing.com
 *   exampleDatasetDelimitedText:
 *     type: azure:datafactory:DatasetDelimitedText
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedServiceWeb.name}
 *       httpServerLocation:
 *         relativeUrl: http://www.bing.com
 *         path: foo/bar/
 *         filename: fizz.txt
 *       columnDelimiter: ','
 *       rowDelimiter: NEW
 *       encoding: UTF-8
 *       quoteCharacter: x
 *       escapeCharacter: f
 *       firstRowAsHeader: true
 *       nullValue: NULL
 * ```
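 * For completeness, a minimal Kotlin sketch of building the same arguments with this class's
 * public constructor. Every field is an optional `Output`, so plain values are wrapped with
 * `Output.of(...)`. The constructor parameters of the nested
 * `DatasetDelimitedTextHttpServerLocationArgs` are assumed to mirror the fields shown in the
 * examples above, and the data factory ID string is a placeholder:
 * ```kotlin
 * import com.pulumi.azure.datafactory.kotlin.DatasetDelimitedTextArgs
 * import com.pulumi.azure.datafactory.kotlin.inputs.DatasetDelimitedTextHttpServerLocationArgs
 * import com.pulumi.core.Output
 *
 * val args = DatasetDelimitedTextArgs(
 *     name = Output.of("example"),
 *     // Placeholder Data Factory ID, for illustration only.
 *     dataFactoryId = Output.of("example-data-factory-id"),
 *     linkedServiceName = Output.of("example"),
 *     httpServerLocation = Output.of(
 *         // NOTE: this nested constructor is assumed to follow the same generated shape
 *         // (optional Output-typed named parameters) as DatasetDelimitedTextArgs itself.
 *         DatasetDelimitedTextHttpServerLocationArgs(
 *             relativeUrl = Output.of("http://www.bing.com"),
 *             path = Output.of("foo/bar/"),
 *             filename = Output.of("fizz.txt"),
 *         ),
 *     ),
 *     columnDelimiter = Output.of(","),
 *     rowDelimiter = Output.of("NEW"),
 *     encoding = Output.of("UTF-8"),
 *     quoteCharacter = Output.of("x"),
 *     escapeCharacter = Output.of("f"),
 *     firstRowAsHeader = Output.of(true),
 *     nullValue = Output.of("NULL"),
 * )
 * ```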
 * 
 * ## Import
 * Data Factory Datasets can be imported using the `resource id`, e.g.
 * ```sh
 * $ pulumi import azure:datafactory/datasetDelimitedText:DatasetDelimitedText example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
 * ```
 * @property additionalProperties A map of additional properties to associate with the Data Factory Dataset.
 * The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
 * @property annotations List of tags that can be used for describing the Data Factory Dataset.
 * @property azureBlobFsLocation An `azure_blob_fs_location` block as defined below.
 * @property azureBlobStorageLocation An `azure_blob_storage_location` block as defined below.
 * @property columnDelimiter The column delimiter. Defaults to `,`.
 * @property compressionCodec The compression codec used to read/write text files. Valid values are `None`, `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy` and `lz4`. Please note these values are case sensitive.
 * @property compressionLevel The compression ratio for the Data Factory Dataset. Valid values are `Fastest` or `Optimal`. Please note these values are case sensitive.
 * @property dataFactoryId The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
 * @property description The description for the Data Factory Dataset.
 * @property encoding The encoding format for the file.
 * @property escapeCharacter The escape character. Defaults to `\`.
 * @property firstRowAsHeader When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to `false`.
 * @property folder The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
 * @property httpServerLocation An `http_server_location` block as defined below.
 * The following arguments are specific to a Delimited Text Dataset:
 * @property linkedServiceName The name of the Data Factory Linked Service with which to associate the Dataset.
 * @property name Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
 * @property nullValue The null value string. Defaults to `""`.
 * @property parameters A map of parameters to associate with the Data Factory Dataset.
 * @property quoteCharacter The quote character. Defaults to `"`.
 * @property rowDelimiter The row delimiter. On read, defaults to any of `\r\n`, `\r` or `\n`; on write, defaults to `\n` for mapping data flows and `\r\n` for the Copy activity.
 * @property schemaColumns A `schema_column` block as defined below.
 */
public data class DatasetDelimitedTextArgs(
    public val additionalProperties: Output<Map<String, String>>? = null,
    public val annotations: Output<List<String>>? = null,
    public val azureBlobFsLocation: Output<DatasetDelimitedTextAzureBlobFsLocationArgs>? = null,
    public val azureBlobStorageLocation: Output<DatasetDelimitedTextAzureBlobStorageLocationArgs>? =
        null,
    public val columnDelimiter: Output<String>? = null,
    public val compressionCodec: Output<String>? = null,
    public val compressionLevel: Output<String>? = null,
    public val dataFactoryId: Output<String>? = null,
    public val description: Output<String>? = null,
    public val encoding: Output<String>? = null,
    public val escapeCharacter: Output<String>? = null,
    public val firstRowAsHeader: Output<Boolean>? = null,
    public val folder: Output<String>? = null,
    public val httpServerLocation: Output<DatasetDelimitedTextHttpServerLocationArgs>? = null,
    public val linkedServiceName: Output<String>? = null,
    public val name: Output<String>? = null,
    public val nullValue: Output<String>? = null,
    public val parameters: Output<Map<String, String>>? = null,
    public val quoteCharacter: Output<String>? = null,
    public val rowDelimiter: Output<String>? = null,
    public val schemaColumns: Output<List<DatasetDelimitedTextSchemaColumnArgs>>? = null,
) : ConvertibleToJava<com.pulumi.azure.datafactory.DatasetDelimitedTextArgs> {
    override fun toJava(): com.pulumi.azure.datafactory.DatasetDelimitedTextArgs =
        com.pulumi.azure.datafactory.DatasetDelimitedTextArgs.builder()
            .additionalProperties(
                additionalProperties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .azureBlobFsLocation(
                azureBlobFsLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .azureBlobStorageLocation(
                azureBlobStorageLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .columnDelimiter(columnDelimiter?.applyValue({ args0 -> args0 }))
            .compressionCodec(compressionCodec?.applyValue({ args0 -> args0 }))
            .compressionLevel(compressionLevel?.applyValue({ args0 -> args0 }))
            .dataFactoryId(dataFactoryId?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .encoding(encoding?.applyValue({ args0 -> args0 }))
            .escapeCharacter(escapeCharacter?.applyValue({ args0 -> args0 }))
            .firstRowAsHeader(firstRowAsHeader?.applyValue({ args0 -> args0 }))
            .folder(folder?.applyValue({ args0 -> args0 }))
            .httpServerLocation(
                httpServerLocation?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .linkedServiceName(linkedServiceName?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .nullValue(nullValue?.applyValue({ args0 -> args0 }))
            .parameters(
                parameters?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .quoteCharacter(quoteCharacter?.applyValue({ args0 -> args0 }))
            .rowDelimiter(rowDelimiter?.applyValue({ args0 -> args0 }))
            .schemaColumns(
                schemaColumns?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            ).build()
}

/**
 * Builder for [DatasetDelimitedTextArgs].
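 *
 * A minimal sketch of the suspend DSL shape this builder is consumed through by the generated
 * resource functions (the nested `http_server_location` builder's setters are assumed to
 * mirror the fields documented above):
 * ```kotlin
 * val configure: suspend DatasetDelimitedTextArgsBuilder.() -> Unit = {
 *     name("example")
 *     columnDelimiter(",")
 *     firstRowAsHeader(true)
 *     httpServerLocation {
 *         // NOTE: these nested setters are assumed from the generated location builder.
 *         relativeUrl("http://www.bing.com")
 *         path("foo/bar/")
 *         filename("fizz.txt")
 *     }
 * }
 * ```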
 */
@PulumiTagMarker
public class DatasetDelimitedTextArgsBuilder internal constructor() {
    private var additionalProperties: Output<Map<String, String>>? = null

    private var annotations: Output<List<String>>? = null

    private var azureBlobFsLocation: Output<DatasetDelimitedTextAzureBlobFsLocationArgs>? = null

    private var azureBlobStorageLocation: Output<DatasetDelimitedTextAzureBlobStorageLocationArgs>? =
        null

    private var columnDelimiter: Output<String>? = null

    private var compressionCodec: Output<String>? = null

    private var compressionLevel: Output<String>? = null

    private var dataFactoryId: Output<String>? = null

    private var description: Output<String>? = null

    private var encoding: Output<String>? = null

    private var escapeCharacter: Output<String>? = null

    private var firstRowAsHeader: Output<Boolean>? = null

    private var folder: Output<String>? = null

    private var httpServerLocation: Output<DatasetDelimitedTextHttpServerLocationArgs>? = null

    private var linkedServiceName: Output<String>? = null

    private var name: Output<String>? = null

    private var nullValue: Output<String>? = null

    private var parameters: Output<Map<String, String>>? = null

    private var quoteCharacter: Output<String>? = null

    private var rowDelimiter: Output<String>? = null

    private var schemaColumns: Output<List<DatasetDelimitedTextSchemaColumnArgs>>? = null

    /**
     * @param value A map of additional properties to associate with the Data Factory Dataset.
     * The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
     */
    @JvmName("qrnrasdgjtsrssno")
    public suspend fun additionalProperties(`value`: Output<Map<String, String>>) {
        this.additionalProperties = value
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("molyrqaljxgprlpd")
    public suspend fun annotations(`value`: Output<List<String>>) {
        this.annotations = value
    }

    @JvmName("fwielshdjdeainrn")
    public suspend fun annotations(vararg values: Output<String>) {
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("tdmupyoyyefvuafu")
    public suspend fun annotations(values: List<Output<String>>) {
        this.annotations = Output.all(values)
    }

    /**
     * @param value An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("lexnpksgkyhbkwhu")
    public suspend
    fun azureBlobFsLocation(`value`: Output<DatasetDelimitedTextAzureBlobFsLocationArgs>) {
        this.azureBlobFsLocation = value
    }

    /**
     * @param value An `azure_blob_storage_location` block as defined below.
     */
    @JvmName("sapbwssxhoqcutop")
    public suspend
    fun azureBlobStorageLocation(`value`: Output<DatasetDelimitedTextAzureBlobStorageLocationArgs>) {
        this.azureBlobStorageLocation = value
    }

    /**
     * @param value The column delimiter. Defaults to `,`.
     */
    @JvmName("rpgapnysorrkiwkf")
    public suspend fun columnDelimiter(`value`: Output<String>) {
        this.columnDelimiter = value
    }

    /**
     * @param value The compression codec used to read/write text files. Valid values are `None`, `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy` and `lz4`. Please note these values are case sensitive.
     */
    @JvmName("umqtraloshaafkcm")
    public suspend fun compressionCodec(`value`: Output<String>) {
        this.compressionCodec = value
    }

    /**
     * @param value The compression ratio for the Data Factory Dataset. Valid values are `Fastest` or `Optimal`. Please note these values are case sensitive.
     */
    @JvmName("wcfjtxudjawnfhkm")
    public suspend fun compressionLevel(`value`: Output<String>) {
        this.compressionLevel = value
    }

    /**
     * @param value The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
     */
    @JvmName("fixwsahihowhstfw")
    public suspend fun dataFactoryId(`value`: Output<String>) {
        this.dataFactoryId = value
    }

    /**
     * @param value The description for the Data Factory Dataset.
     */
    @JvmName("eqmwfmcdpliaamxj")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value The encoding format for the file.
     */
    @JvmName("eiudgbmvavgdlpxu")
    public suspend fun encoding(`value`: Output<String>) {
        this.encoding = value
    }

    /**
     * @param value The escape character. Defaults to `\`.
     */
    @JvmName("pulfsoxbrffffbam")
    public suspend fun escapeCharacter(`value`: Output<String>) {
        this.escapeCharacter = value
    }

    /**
     * @param value When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to `false`.
     */
    @JvmName("kudfjfcnoeldaoas")
    public suspend fun firstRowAsHeader(`value`: Output<Boolean>) {
        this.firstRowAsHeader = value
    }

    /**
     * @param value The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     */
    @JvmName("jlivvmihncmixyvx")
    public suspend fun folder(`value`: Output<String>) {
        this.folder = value
    }

    /**
     * @param value An `http_server_location` block as defined below.
     * The following arguments are specific to a Delimited Text Dataset:
     */
    @JvmName("iandolhsdjavoros")
    public suspend
    fun httpServerLocation(`value`: Output<DatasetDelimitedTextHttpServerLocationArgs>) {
        this.httpServerLocation = value
    }

    /**
     * @param value The name of the Data Factory Linked Service with which to associate the Dataset.
     */
    @JvmName("hpyksukochlfvlsr")
    public suspend fun linkedServiceName(`value`: Output<String>) {
        this.linkedServiceName = value
    }

    /**
     * @param value Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("ktgoixggfnmohxus")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value The null value string. Defaults to `""`.
     */
    @JvmName("tqevlexfonbkrlgq")
    public suspend fun nullValue(`value`: Output<String>) {
        this.nullValue = value
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("tvoicyqdhpibkgfx")
    public suspend fun parameters(`value`: Output<Map<String, String>>) {
        this.parameters = value
    }

    /**
     * @param value The quote character. Defaults to `"`.
     */
    @JvmName("wposmqbmdvgkcbgs")
    public suspend fun quoteCharacter(`value`: Output<String>) {
        this.quoteCharacter = value
    }

    /**
     * @param value The row delimiter. On read, defaults to any of `\r\n`, `\r` or `\n`; on write, defaults to `\n` for mapping data flows and `\r\n` for the Copy activity.
     */
    @JvmName("clipufhuuwmrxlkw")
    public suspend fun rowDelimiter(`value`: Output<String>) {
        this.rowDelimiter = value
    }

    /**
     * @param value A `schema_column` block as defined below.
     */
    @JvmName("nivjvixledprvdxs")
    public suspend fun schemaColumns(`value`: Output<List<DatasetDelimitedTextSchemaColumnArgs>>) {
        this.schemaColumns = value
    }

    @JvmName("niakoxpuowjeaykk")
    public suspend fun schemaColumns(vararg values: Output<DatasetDelimitedTextSchemaColumnArgs>) {
        this.schemaColumns = Output.all(values.asList())
    }

    /**
     * @param values A `schema_column` block as defined below.
     */
    @JvmName("mnckegnklmjjchpi")
    public suspend fun schemaColumns(values: List<Output<DatasetDelimitedTextSchemaColumnArgs>>) {
        this.schemaColumns = Output.all(values)
    }

    /**
     * @param value A map of additional properties to associate with the Data Factory Dataset.
     * The following supported locations for a Delimited Text Dataset (exactly one of them must be set):
     */
    @JvmName("jttmwuawhyrqhxby")
    public suspend fun additionalProperties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param values A map of additional properties to associate with the Data Factory Dataset.
     * The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
     */
    @JvmName("cwcmxgyllxkeljol")
    public fun additionalProperties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.additionalProperties = mapped
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("ukhlajfwnehjemnw")
    public suspend fun annotations(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Dataset.
     */
    @JvmName("dpfwhrsqhrqhgwsj")
    public suspend fun annotations(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("ojhahuxieedggmcn")
    public suspend fun azureBlobFsLocation(`value`: DatasetDelimitedTextAzureBlobFsLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.azureBlobFsLocation = mapped
    }

    /**
     * @param argument An `azure_blob_fs_location` block as defined below.
     */
    @JvmName("vqvdvcqufmimtiyo")
    public suspend
    fun azureBlobFsLocation(argument: suspend DatasetDelimitedTextAzureBlobFsLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetDelimitedTextAzureBlobFsLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.azureBlobFsLocation = mapped
    }

    /**
     * @param value An `azure_blob_storage_location` block as defined below.
     */
    @JvmName("oeraeyvsndshqetn")
    public suspend
    fun azureBlobStorageLocation(`value`: DatasetDelimitedTextAzureBlobStorageLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.azureBlobStorageLocation = mapped
    }

    /**
     * @param argument An `azure_blob_storage_location` block as defined below.
     */
    @JvmName("dbtnqkbumpjqnivb")
    public suspend
    fun azureBlobStorageLocation(argument: suspend DatasetDelimitedTextAzureBlobStorageLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetDelimitedTextAzureBlobStorageLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.azureBlobStorageLocation = mapped
    }

    /**
     * @param value The column delimiter. Defaults to `,`.
     */
    @JvmName("miahtmoprbabobrx")
    public suspend fun columnDelimiter(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.columnDelimiter = mapped
    }

    /**
     * @param value The compression codec used to read/write text files. Valid values are `None`, `bzip2`, `gzip`, `deflate`, `ZipDeflate`, `TarGzip`, `Tar`, `snappy` and `lz4`. Please note these values are case sensitive.
     */
    @JvmName("wvbvdnmbxulibsvf")
    public suspend fun compressionCodec(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.compressionCodec = mapped
    }

    /**
     * @param value The compression ratio for the Data Factory Dataset. Valid values are `Fastest` or `Optimal`. Please note these values are case sensitive.
     */
    @JvmName("cyrygcyebooyqwbi")
    public suspend fun compressionLevel(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.compressionLevel = mapped
    }

    /**
     * @param value The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
     */
    @JvmName("hrxnyoripaphggvg")
    public suspend fun dataFactoryId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataFactoryId = mapped
    }

    /**
     * @param value The description for the Data Factory Dataset.
     */
    @JvmName("lvrgtnfffpbowwfc")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value The encoding format for the file.
     */
    @JvmName("wxelojhdcxkutyhk")
    public suspend fun encoding(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.encoding = mapped
    }

    /**
     * @param value The escape character. Defaults to `\`.
     */
    @JvmName("tbxvbjwnosldidto")
    public suspend fun escapeCharacter(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.escapeCharacter = mapped
    }

    /**
     * @param value When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to `false`.
     */
    @JvmName("cpeifweqrkebtduq")
    public suspend fun firstRowAsHeader(`value`: Boolean?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.firstRowAsHeader = mapped
    }

    /**
     * @param value The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
     */
    @JvmName("tphhpdtpuqhubqdy")
    public suspend fun folder(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.folder = mapped
    }

    /**
     * @param value An `http_server_location` block as defined below.
     * The following arguments are specific to a Delimited Text Dataset:
     */
    @JvmName("ryghcjmmldxkurcp")
    public suspend fun httpServerLocation(`value`: DatasetDelimitedTextHttpServerLocationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.httpServerLocation = mapped
    }

    /**
     * @param argument An `http_server_location` block as defined below.
     * The following arguments are specific to a Delimited Text Dataset:
     */
    @JvmName("qrswqoowdibctskl")
    public suspend
    fun httpServerLocation(argument: suspend DatasetDelimitedTextHttpServerLocationArgsBuilder.() -> Unit) {
        val toBeMapped = DatasetDelimitedTextHttpServerLocationArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.httpServerLocation = mapped
    }

    /**
     * @param value The name of the Data Factory Linked Service with which to associate the Dataset.
     */
    @JvmName("mihknhekebxrpdso")
    public suspend fun linkedServiceName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.linkedServiceName = mapped
    }

    /**
     * @param value Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/azure/data-factory/naming-rules) for all restrictions.
     */
    @JvmName("dntlhekjhsutehav")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value The null value string. Defaults to `""`.
     */
    @JvmName("bovrefcqqmfeaehc")
    public suspend fun nullValue(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.nullValue = mapped
    }

    /**
     * @param value A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("kasrsjcgnxqoqtxp")
    public suspend fun parameters(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param values A map of parameters to associate with the Data Factory Dataset.
     */
    @JvmName("udbfcilcpccgomfg")
    public fun parameters(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param value The quote character. Defaults to `"`.
     */
    @JvmName("dlcygdkekloxwkds")
    public suspend fun quoteCharacter(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.quoteCharacter = mapped
    }

    /**
     * @param value The row delimiter. On read, defaults to any of `\r\n`, `\r` or `\n`; on write, defaults to `\n` for mapping data flows and `\r\n` for the Copy activity.
     */
    @JvmName("dirdifxvhaiibvjf")
    public suspend fun rowDelimiter(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.rowDelimiter = mapped
    }

    /**
     * @param value A `schema_column` block as defined below.
     */
    @JvmName("mlgswconnniwmenp")
    public suspend fun schemaColumns(`value`: List<DatasetDelimitedTextSchemaColumnArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("dslkpcpptnbidoju")
    public suspend
    fun schemaColumns(argument: List<suspend DatasetDelimitedTextSchemaColumnArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            DatasetDelimitedTextSchemaColumnArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("hcjlgphwfhhwlugr")
    public suspend fun schemaColumns(
        vararg
        argument: suspend DatasetDelimitedTextSchemaColumnArgsBuilder.() -> Unit,
    ) {
        val toBeMapped = argument.toList().map {
            DatasetDelimitedTextSchemaColumnArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param argument A `schema_column` block as defined below.
     */
    @JvmName("mklxqbvoynnfouqy")
    public suspend
    fun schemaColumns(argument: suspend DatasetDelimitedTextSchemaColumnArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(
            DatasetDelimitedTextSchemaColumnArgsBuilder().applySuspend {
                argument()
            }.build(),
        )
        val mapped = of(toBeMapped)
        this.schemaColumns = mapped
    }

    /**
     * @param values A `schema_column` block as defined below.
     */
    @JvmName("xixvcrmuoofcgbas")
    public suspend fun schemaColumns(vararg values: DatasetDelimitedTextSchemaColumnArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.schemaColumns = mapped
    }

    internal fun build(): DatasetDelimitedTextArgs = DatasetDelimitedTextArgs(
        additionalProperties = additionalProperties,
        annotations = annotations,
        azureBlobFsLocation = azureBlobFsLocation,
        azureBlobStorageLocation = azureBlobStorageLocation,
        columnDelimiter = columnDelimiter,
        compressionCodec = compressionCodec,
        compressionLevel = compressionLevel,
        dataFactoryId = dataFactoryId,
        description = description,
        encoding = encoding,
        escapeCharacter = escapeCharacter,
        firstRowAsHeader = firstRowAsHeader,
        folder = folder,
        httpServerLocation = httpServerLocation,
        linkedServiceName = linkedServiceName,
        name = name,
        nullValue = nullValue,
        parameters = parameters,
        quoteCharacter = quoteCharacter,
        rowDelimiter = rowDelimiter,
        schemaColumns = schemaColumns,
    )
}



