All downloads are free. Search and download functionality uses the official Maven repository.

com.pulumi.azure.datafactory.kotlin.FlowletDataFlowArgs.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 6.14.0.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.datafactory.kotlin

import com.pulumi.azure.datafactory.FlowletDataFlowArgs.builder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSinkArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSinkArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSourceArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSourceArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowTransformationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowTransformationArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.jvm.JvmName

/**
 * Manages a Flowlet Data Flow inside an Azure Data Factory.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const example = new azure.core.ResourceGroup("example", {
 *     name: "example-resources",
 *     location: "West Europe",
 * });
 * const exampleAccount = new azure.storage.Account("example", {
 *     name: "example",
 *     location: example.location,
 *     resourceGroupName: example.name,
 *     accountTier: "Standard",
 *     accountReplicationType: "LRS",
 * });
 * const exampleFactory = new azure.datafactory.Factory("example", {
 *     name: "example",
 *     location: example.location,
 *     resourceGroupName: example.name,
 * });
 * const exampleLinkedCustomService = new azure.datafactory.LinkedCustomService("example", {
 *     name: "linked_service",
 *     dataFactoryId: exampleFactory.id,
 *     type: "AzureBlobStorage",
 *     typePropertiesJson: pulumi.interpolate`{
 *   "connectionString": "${exampleAccount.primaryConnectionString}"
 * }
 * `,
 * });
 * const example1 = new azure.datafactory.DatasetJson("example1", {
 *     name: "dataset1",
 *     dataFactoryId: exampleFactory.id,
 *     linkedServiceName: exampleLinkedCustomService.name,
 *     azureBlobStorageLocation: {
 *         container: "container",
 *         path: "foo/bar/",
 *         filename: "foo.txt",
 *     },
 *     encoding: "UTF-8",
 * });
 * const example2 = new azure.datafactory.DatasetJson("example2", {
 *     name: "dataset2",
 *     dataFactoryId: exampleFactory.id,
 *     linkedServiceName: exampleLinkedCustomService.name,
 *     azureBlobStorageLocation: {
 *         container: "container",
 *         path: "foo/bar/",
 *         filename: "bar.txt",
 *     },
 *     encoding: "UTF-8",
 * });
 * const example1FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example1", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     sources: [{
 *         name: "source1",
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     sinks: [{
 *         name: "sink1",
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     script: `source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `,
 * });
 * const example2FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example2", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     sources: [{
 *         name: "source1",
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     sinks: [{
 *         name: "sink1",
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     script: `source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `,
 * });
 * const exampleFlowletDataFlow = new azure.datafactory.FlowletDataFlow("example", {
 *     name: "example",
 *     dataFactoryId: exampleFactory.id,
 *     sources: [{
 *         name: "source1",
 *         flowlet: {
 *             name: example1FlowletDataFlow.name,
 *         },
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     sinks: [{
 *         name: "sink1",
 *         flowlet: {
 *             name: example2FlowletDataFlow.name,
 *         },
 *         linkedService: {
 *             name: exampleLinkedCustomService.name,
 *         },
 *     }],
 *     script: `source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `,
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example = azure.core.ResourceGroup("example",
 *     name="example-resources",
 *     location="West Europe")
 * example_account = azure.storage.Account("example",
 *     name="example",
 *     location=example.location,
 *     resource_group_name=example.name,
 *     account_tier="Standard",
 *     account_replication_type="LRS")
 * example_factory = azure.datafactory.Factory("example",
 *     name="example",
 *     location=example.location,
 *     resource_group_name=example.name)
 * example_linked_custom_service = azure.datafactory.LinkedCustomService("example",
 *     name="linked_service",
 *     data_factory_id=example_factory.id,
 *     type="AzureBlobStorage",
 *     type_properties_json=example_account.primary_connection_string.apply(lambda primary_connection_string: f"""{{
 *   "connectionString": "{primary_connection_string}"
 * }}
 * """))
 * example1 = azure.datafactory.DatasetJson("example1",
 *     name="dataset1",
 *     data_factory_id=example_factory.id,
 *     linked_service_name=example_linked_custom_service.name,
 *     azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
 *         container="container",
 *         path="foo/bar/",
 *         filename="foo.txt",
 *     ),
 *     encoding="UTF-8")
 * example2 = azure.datafactory.DatasetJson("example2",
 *     name="dataset2",
 *     data_factory_id=example_factory.id,
 *     linked_service_name=example_linked_custom_service.name,
 *     azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
 *         container="container",
 *         path="foo/bar/",
 *         filename="bar.txt",
 *     ),
 *     encoding="UTF-8")
 * example1_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example1",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     sources=[azure.datafactory.FlowletDataFlowSourceArgs(
 *         name="source1",
 *         linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
 *         name="sink1",
 *         linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     script="""source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * """)
 * example2_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example2",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     sources=[azure.datafactory.FlowletDataFlowSourceArgs(
 *         name="source1",
 *         linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
 *         name="sink1",
 *         linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     script="""source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * """)
 * example_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example",
 *     name="example",
 *     data_factory_id=example_factory.id,
 *     sources=[azure.datafactory.FlowletDataFlowSourceArgs(
 *         name="source1",
 *         flowlet=azure.datafactory.FlowletDataFlowSourceFlowletArgs(
 *             name=example1_flowlet_data_flow.name,
 *         ),
 *         linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
 *         name="sink1",
 *         flowlet=azure.datafactory.FlowletDataFlowSinkFlowletArgs(
 *             name=example2_flowlet_data_flow.name,
 *         ),
 *         linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
 *             name=example_linked_custom_service.name,
 *         ),
 *     )],
 *     script="""source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * """)
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "example-resources",
 *         Location = "West Europe",
 *     });
 *     var exampleAccount = new Azure.Storage.Account("example", new()
 *     {
 *         Name = "example",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *         AccountTier = "Standard",
 *         AccountReplicationType = "LRS",
 *     });
 *     var exampleFactory = new Azure.DataFactory.Factory("example", new()
 *     {
 *         Name = "example",
 *         Location = example.Location,
 *         ResourceGroupName = example.Name,
 *     });
 *     var exampleLinkedCustomService = new Azure.DataFactory.LinkedCustomService("example", new()
 *     {
 *         Name = "linked_service",
 *         DataFactoryId = exampleFactory.Id,
 *         Type = "AzureBlobStorage",
 *         TypePropertiesJson = exampleAccount.PrimaryConnectionString.Apply(primaryConnectionString => @$"{{
 *   ""connectionString"": ""{primaryConnectionString}""
 * }}
 * "),
 *     });
 *     var example1 = new Azure.DataFactory.DatasetJson("example1", new()
 *     {
 *         Name = "dataset1",
 *         DataFactoryId = exampleFactory.Id,
 *         LinkedServiceName = exampleLinkedCustomService.Name,
 *         AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
 *         {
 *             Container = "container",
 *             Path = "foo/bar/",
 *             Filename = "foo.txt",
 *         },
 *         Encoding = "UTF-8",
 *     });
 *     var example2 = new Azure.DataFactory.DatasetJson("example2", new()
 *     {
 *         Name = "dataset2",
 *         DataFactoryId = exampleFactory.Id,
 *         LinkedServiceName = exampleLinkedCustomService.Name,
 *         AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
 *         {
 *             Container = "container",
 *             Path = "foo/bar/",
 *             Filename = "bar.txt",
 *         },
 *         Encoding = "UTF-8",
 *     });
 *     var example1FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example1", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         Sources = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
 *             {
 *                 Name = "source1",
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Sinks = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
 *             {
 *                 Name = "sink1",
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Script = @"source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * ",
 *     });
 *     var example2FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example2", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         Sources = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
 *             {
 *                 Name = "source1",
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Sinks = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
 *             {
 *                 Name = "sink1",
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Script = @"source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * ",
 *     });
 *     var exampleFlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example", new()
 *     {
 *         Name = "example",
 *         DataFactoryId = exampleFactory.Id,
 *         Sources = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
 *             {
 *                 Name = "source1",
 *                 Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSourceFlowletArgs
 *                 {
 *                     Name = example1FlowletDataFlow.Name,
 *                 },
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Sinks = new[]
 *         {
 *             new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
 *             {
 *                 Name = "sink1",
 *                 Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSinkFlowletArgs
 *                 {
 *                     Name = example2FlowletDataFlow.Name,
 *                 },
 *                 LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
 *                 {
 *                     Name = exampleLinkedCustomService.Name,
 *                 },
 *             },
 *         },
 *         Script = @"source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * ",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("example-resources"),
 * 			Location: pulumi.String("West Europe"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
 * 			Name:                   pulumi.String("example"),
 * 			Location:               example.Location,
 * 			ResourceGroupName:      example.Name,
 * 			AccountTier:            pulumi.String("Standard"),
 * 			AccountReplicationType: pulumi.String("LRS"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
 * 			Name:              pulumi.String("example"),
 * 			Location:          example.Location,
 * 			ResourceGroupName: example.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleLinkedCustomService, err := datafactory.NewLinkedCustomService(ctx, "example", &datafactory.LinkedCustomServiceArgs{
 * 			Name:          pulumi.String("linked_service"),
 * 			DataFactoryId: exampleFactory.ID(),
 * 			Type:          pulumi.String("AzureBlobStorage"),
 * 			TypePropertiesJson: exampleAccount.PrimaryConnectionString.ApplyT(func(primaryConnectionString string) (string, error) {
 * 				return fmt.Sprintf("{\n  \"connectionString\": \"%v\"\n}\n", primaryConnectionString), nil
 * 			}).(pulumi.StringOutput),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewDatasetJson(ctx, "example1", &datafactory.DatasetJsonArgs{
 * 			Name:              pulumi.String("dataset1"),
 * 			DataFactoryId:     exampleFactory.ID(),
 * 			LinkedServiceName: exampleLinkedCustomService.Name,
 * 			AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
 * 				Container: pulumi.String("container"),
 * 				Path:      pulumi.String("foo/bar/"),
 * 				Filename:  pulumi.String("foo.txt"),
 * 			},
 * 			Encoding: pulumi.String("UTF-8"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewDatasetJson(ctx, "example2", &datafactory.DatasetJsonArgs{
 * 			Name:              pulumi.String("dataset2"),
 * 			DataFactoryId:     exampleFactory.ID(),
 * 			LinkedServiceName: exampleLinkedCustomService.Name,
 * 			AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
 * 				Container: pulumi.String("container"),
 * 				Path:      pulumi.String("foo/bar/"),
 * 				Filename:  pulumi.String("bar.txt"),
 * 			},
 * 			Encoding: pulumi.String("UTF-8"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		example1FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example1", &datafactory.FlowletDataFlowArgs{
 * 			Name:          pulumi.String("example"),
 * 			DataFactoryId: exampleFactory.ID(),
 * 			Sources: datafactory.FlowletDataFlowSourceArray{
 * 				&datafactory.FlowletDataFlowSourceArgs{
 * 					Name: pulumi.String("source1"),
 * 					LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Sinks: datafactory.FlowletDataFlowSinkArray{
 * 				&datafactory.FlowletDataFlowSinkArgs{
 * 					Name: pulumi.String("sink1"),
 * 					LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Script: pulumi.String(`source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		example2FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example2", &datafactory.FlowletDataFlowArgs{
 * 			Name:          pulumi.String("example"),
 * 			DataFactoryId: exampleFactory.ID(),
 * 			Sources: datafactory.FlowletDataFlowSourceArray{
 * 				&datafactory.FlowletDataFlowSourceArgs{
 * 					Name: pulumi.String("source1"),
 * 					LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Sinks: datafactory.FlowletDataFlowSinkArray{
 * 				&datafactory.FlowletDataFlowSinkArgs{
 * 					Name: pulumi.String("sink1"),
 * 					LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Script: pulumi.String(`source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datafactory.NewFlowletDataFlow(ctx, "example", &datafactory.FlowletDataFlowArgs{
 * 			Name:          pulumi.String("example"),
 * 			DataFactoryId: exampleFactory.ID(),
 * 			Sources: datafactory.FlowletDataFlowSourceArray{
 * 				&datafactory.FlowletDataFlowSourceArgs{
 * 					Name: pulumi.String("source1"),
 * 					Flowlet: &datafactory.FlowletDataFlowSourceFlowletArgs{
 * 						Name: example1FlowletDataFlow.Name,
 * 					},
 * 					LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Sinks: datafactory.FlowletDataFlowSinkArray{
 * 				&datafactory.FlowletDataFlowSinkArgs{
 * 					Name: pulumi.String("sink1"),
 * 					Flowlet: &datafactory.FlowletDataFlowSinkFlowletArgs{
 * 						Name: example2FlowletDataFlow.Name,
 * 					},
 * 					LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
 * 						Name: exampleLinkedCustomService.Name,
 * 					},
 * 				},
 * 			},
 * 			Script: pulumi.String(`source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 * `),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.storage.Account;
 * import com.pulumi.azure.storage.AccountArgs;
 * import com.pulumi.azure.datafactory.Factory;
 * import com.pulumi.azure.datafactory.FactoryArgs;
 * import com.pulumi.azure.datafactory.LinkedCustomService;
 * import com.pulumi.azure.datafactory.LinkedCustomServiceArgs;
 * import com.pulumi.azure.datafactory.DatasetJson;
 * import com.pulumi.azure.datafactory.DatasetJsonArgs;
 * import com.pulumi.azure.datafactory.inputs.DatasetJsonAzureBlobStorageLocationArgs;
 * import com.pulumi.azure.datafactory.FlowletDataFlow;
 * import com.pulumi.azure.datafactory.FlowletDataFlowArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceLinkedServiceArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkLinkedServiceArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceFlowletArgs;
 * import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkFlowletArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new ResourceGroup("example", ResourceGroupArgs.builder()
 *             .name("example-resources")
 *             .location("West Europe")
 *             .build());
 *         var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
 *             .name("example")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .accountTier("Standard")
 *             .accountReplicationType("LRS")
 *             .build());
 *         var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
 *             .name("example")
 *             .location(example.location())
 *             .resourceGroupName(example.name())
 *             .build());
 *         var exampleLinkedCustomService = new LinkedCustomService("exampleLinkedCustomService", LinkedCustomServiceArgs.builder()
 *             .name("linked_service")
 *             .dataFactoryId(exampleFactory.id())
 *             .type("AzureBlobStorage")
 *             .typePropertiesJson(exampleAccount.primaryConnectionString().applyValue(primaryConnectionString -> """
 * {
 *   "connectionString": "%s"
 * }
 * """.formatted(primaryConnectionString)))
 *             .build());
 *         var example1 = new DatasetJson("example1", DatasetJsonArgs.builder()
 *             .name("dataset1")
 *             .dataFactoryId(exampleFactory.id())
 *             .linkedServiceName(exampleLinkedCustomService.name())
 *             .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
 *                 .container("container")
 *                 .path("foo/bar/")
 *                 .filename("foo.txt")
 *                 .build())
 *             .encoding("UTF-8")
 *             .build());
 *         var example2 = new DatasetJson("example2", DatasetJsonArgs.builder()
 *             .name("dataset2")
 *             .dataFactoryId(exampleFactory.id())
 *             .linkedServiceName(exampleLinkedCustomService.name())
 *             .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
 *                 .container("container")
 *                 .path("foo/bar/")
 *                 .filename("bar.txt")
 *                 .build())
 *             .encoding("UTF-8")
 *             .build());
 *         var example1FlowletDataFlow = new FlowletDataFlow("example1FlowletDataFlow", FlowletDataFlowArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .sources(FlowletDataFlowSourceArgs.builder()
 *                 .name("source1")
 *                 .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .sinks(FlowletDataFlowSinkArgs.builder()
 *                 .name("sink1")
 *                 .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .script("""
 * source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 *             """)
 *             .build());
 *         var example2FlowletDataFlow = new FlowletDataFlow("example2FlowletDataFlow", FlowletDataFlowArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .sources(FlowletDataFlowSourceArgs.builder()
 *                 .name("source1")
 *                 .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .sinks(FlowletDataFlowSinkArgs.builder()
 *                 .name("sink1")
 *                 .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .script("""
 * source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 *             """)
 *             .build());
 *         var exampleFlowletDataFlow = new FlowletDataFlow("exampleFlowletDataFlow", FlowletDataFlowArgs.builder()
 *             .name("example")
 *             .dataFactoryId(exampleFactory.id())
 *             .sources(FlowletDataFlowSourceArgs.builder()
 *                 .name("source1")
 *                 .flowlet(FlowletDataFlowSourceFlowletArgs.builder()
 *                     .name(example1FlowletDataFlow.name())
 *                     .build())
 *                 .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .sinks(FlowletDataFlowSinkArgs.builder()
 *                 .name("sink1")
 *                 .flowlet(FlowletDataFlowSinkFlowletArgs.builder()
 *                     .name(example2FlowletDataFlow.name())
 *                     .build())
 *                 .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
 *                     .name(exampleLinkedCustomService.name())
 *                     .build())
 *                 .build())
 *             .script("""
 * source(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   limit: 100,
 *   ignoreNoFilesFound: false,
 *   documentForm: 'documentPerLine') ~> source1
 * source1 sink(
 *   allowSchemaDrift: true,
 *   validateSchema: false,
 *   skipDuplicateMapInputs: true,
 *   skipDuplicateMapOutputs: true) ~> sink1
 *             """)
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example-resources
 *       location: West Europe
 *   exampleAccount:
 *     type: azure:storage:Account
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *       accountTier: Standard
 *       accountReplicationType: LRS
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *   exampleLinkedCustomService:
 *     type: azure:datafactory:LinkedCustomService
 *     name: example
 *     properties:
 *       name: linked_service
 *       dataFactoryId: ${exampleFactory.id}
 *       type: AzureBlobStorage
 *       typePropertiesJson: |
 *         {
 *           "connectionString": "${exampleAccount.primaryConnectionString}"
 *         }
 *   example1:
 *     type: azure:datafactory:DatasetJson
 *     properties:
 *       name: dataset1
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedCustomService.name}
 *       azureBlobStorageLocation:
 *         container: container
 *         path: foo/bar/
 *         filename: foo.txt
 *       encoding: UTF-8
 *   example2:
 *     type: azure:datafactory:DatasetJson
 *     properties:
 *       name: dataset2
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedCustomService.name}
 *       azureBlobStorageLocation:
 *         container: container
 *         path: foo/bar/
 *         filename: bar.txt
 *       encoding: UTF-8
 *   exampleFlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           flowlet:
 *             name: ${example1FlowletDataFlow.name}
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           flowlet:
 *             name: ${example2FlowletDataFlow.name}
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  limit: 100, \n  ignoreNoFilesFound: false, \n  documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  skipDuplicateMapInputs: true, \n  skipDuplicateMapOutputs: true) ~> sink1\n"
 *   example1FlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example1
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  limit: 100, \n  ignoreNoFilesFound: false, \n  documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  skipDuplicateMapInputs: true, \n  skipDuplicateMapOutputs: true) ~> sink1\n"
 *   example2FlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example2
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  limit: 100, \n  ignoreNoFilesFound: false, \n  documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  skipDuplicateMapInputs: true, \n  skipDuplicateMapOutputs: true) ~> sink1\n"
 * ```
 * 
 * ## Import
 * Data Factory Flowlet Data Flow can be imported using the `resource id`, e.g.
 * ```sh
 * $ pulumi import azure:datafactory/flowletDataFlow:FlowletDataFlow example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example
 * ```
 * @property annotations List of tags that can be used for describing the Data Factory Flowlet Data Flow.
 * @property dataFactoryId The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
 * @property description The description for the Data Factory Flowlet Data Flow.
 * @property folder The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
 * @property name Specifies the name of the Data Factory Flowlet Data Flow. Changing this forces a new resource to be created.
 * @property script The script for the Data Factory Flowlet Data Flow.
 * @property scriptLines The script lines for the Data Factory Flowlet Data Flow.
 * @property sinks One or more `sink` blocks as defined below.
 * @property sources One or more `source` blocks as defined below.
 * @property transformations One or more `transformation` blocks as defined below.
 */
/**
 * Typed argument holder for the Flowlet Data Flow resource.
 *
 * NOTE(review): the generic type arguments on `Output` and `ConvertibleToJava`
 * were stripped during extraction; they are reconstructed here from the
 * `@property` KDoc above and the `toJava()` mapping, following the standard
 * Pulumi Kotlin codegen shape.
 */
public data class FlowletDataFlowArgs(
    public val annotations: Output<List<String>>? = null,
    public val dataFactoryId: Output<String>? = null,
    public val description: Output<String>? = null,
    public val folder: Output<String>? = null,
    public val name: Output<String>? = null,
    public val script: Output<String>? = null,
    public val scriptLines: Output<List<String>>? = null,
    public val sinks: Output<List<FlowletDataFlowSinkArgs>>? = null,
    public val sources: Output<List<FlowletDataFlowSourceArgs>>? = null,
    public val transformations: Output<List<FlowletDataFlowTransformationArgs>>? = null,
) : ConvertibleToJava<com.pulumi.azure.datafactory.FlowletDataFlowArgs> {
    /**
     * Converts this Kotlin args wrapper to the underlying Java builder type.
     * Each nested Kotlin args value is recursively converted via its own `toJava()`.
     */
    override fun toJava(): com.pulumi.azure.datafactory.FlowletDataFlowArgs =
        com.pulumi.azure.datafactory.FlowletDataFlowArgs.builder()
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .dataFactoryId(dataFactoryId?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .folder(folder?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .script(script?.applyValue({ args0 -> args0 }))
            .scriptLines(scriptLines?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .sinks(sinks?.applyValue({ args0 -> args0.map({ args0 -> args0.let({ args0 -> args0.toJava() }) }) }))
            .sources(
                sources?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            )
            .transformations(
                transformations?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            ).build()
}

/**
 * Builder for [FlowletDataFlowArgs].
 */
@PulumiTagMarker
public class FlowletDataFlowArgsBuilder internal constructor() {
    // NOTE(review): all generic type arguments in this class were stripped during
    // extraction; they are reconstructed from the corresponding data-class
    // properties and the standard Pulumi Kotlin builder codegen pattern.
    private var annotations: Output<List<String>>? = null

    private var dataFactoryId: Output<String>? = null

    private var description: Output<String>? = null

    private var folder: Output<String>? = null

    private var name: Output<String>? = null

    private var script: Output<String>? = null

    private var scriptLines: Output<List<String>>? = null

    private var sinks: Output<List<FlowletDataFlowSinkArgs>>? = null

    private var sources: Output<List<FlowletDataFlowSourceArgs>>? = null

    private var transformations: Output<List<FlowletDataFlowTransformationArgs>>? = null

    /**
     * @param value List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("gooiueolmaljryuu")
    public suspend fun annotations(`value`: Output<List<String>>) {
        this.annotations = value
    }

    @JvmName("nkpqhvbmhsiboixk")
    public suspend fun annotations(vararg values: Output<String>) {
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("eqllemxlefyrecrw")
    public suspend fun annotations(values: List<Output<String>>) {
        this.annotations = Output.all(values)
    }

    /**
     * @param value The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
     */
    @JvmName("dsupxdjetasgdyeq")
    public suspend fun dataFactoryId(`value`: Output<String>) {
        this.dataFactoryId = value
    }

    /**
     * @param value The description for the Data Factory Flowlet Data Flow.
     */
    @JvmName("amdapmcortmiwdlq")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
     */
    @JvmName("leefhrylxclmqxml")
    public suspend fun folder(`value`: Output<String>) {
        this.folder = value
    }

    /**
     * @param value Specifies the name of the Data Factory Flowlet Data Flow. Changing this forces a new resource to be created.
     */
    @JvmName("jqxldhtyntrbvkym")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value The script for the Data Factory Flowlet Data Flow.
     */
    @JvmName("yakojmpxmpglcpgr")
    public suspend fun script(`value`: Output<String>) {
        this.script = value
    }

    /**
     * @param value The script lines for the Data Factory Flowlet Data Flow.
     */
    @JvmName("poqvqskofifyaisy")
    public suspend fun scriptLines(`value`: Output<List<String>>) {
        this.scriptLines = value
    }

    @JvmName("gdbudcrlntdsaoaj")
    public suspend fun scriptLines(vararg values: Output<String>) {
        this.scriptLines = Output.all(values.asList())
    }

    /**
     * @param values The script lines for the Data Factory Flowlet Data Flow.
     */
    @JvmName("lgofbgidwcmcrvjo")
    public suspend fun scriptLines(values: List<Output<String>>) {
        this.scriptLines = Output.all(values)
    }

    /**
     * @param value One or more `sink` blocks as defined below.
     */
    @JvmName("ddyfsdibyacesyix")
    public suspend fun sinks(`value`: Output<List<FlowletDataFlowSinkArgs>>) {
        this.sinks = value
    }

    @JvmName("smavjemgrygcsjea")
    public suspend fun sinks(vararg values: Output<FlowletDataFlowSinkArgs>) {
        this.sinks = Output.all(values.asList())
    }

    /**
     * @param values One or more `sink` blocks as defined below.
     */
    @JvmName("isbxryvvdxpkratn")
    public suspend fun sinks(values: List<Output<FlowletDataFlowSinkArgs>>) {
        this.sinks = Output.all(values)
    }

    /**
     * @param value One or more `source` blocks as defined below.
     */
    @JvmName("kamejsfuhuoibdtk")
    public suspend fun sources(`value`: Output<List<FlowletDataFlowSourceArgs>>) {
        this.sources = value
    }

    @JvmName("jukgrplubugofrna")
    public suspend fun sources(vararg values: Output<FlowletDataFlowSourceArgs>) {
        this.sources = Output.all(values.asList())
    }

    /**
     * @param values One or more `source` blocks as defined below.
     */
    @JvmName("ilimjslpngtgfmue")
    public suspend fun sources(values: List<Output<FlowletDataFlowSourceArgs>>) {
        this.sources = Output.all(values)
    }

    /**
     * @param value One or more `transformation` blocks as defined below.
     */
    @JvmName("lxqpcyjrqgpglitt")
    public suspend fun transformations(`value`: Output<List<FlowletDataFlowTransformationArgs>>) {
        this.transformations = value
    }

    @JvmName("jgalasymhawwawsq")
    public suspend fun transformations(vararg values: Output<FlowletDataFlowTransformationArgs>) {
        this.transformations = Output.all(values.asList())
    }

    /**
     * @param values One or more `transformation` blocks as defined below.
     */
    @JvmName("cwectloaibvicaoe")
    public suspend fun transformations(values: List<Output<FlowletDataFlowTransformationArgs>>) {
        this.transformations = Output.all(values)
    }

    /**
     * @param value List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("siutyxsesovkisdm")
    public suspend fun annotations(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("terwscjeqgdugwmm")
    public suspend fun annotations(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
     */
    @JvmName("pqaptnvyoypliooq")
    public suspend fun dataFactoryId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataFactoryId = mapped
    }

    /**
     * @param value The description for the Data Factory Flowlet Data Flow.
     */
    @JvmName("kalcovlgkojqtumf")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
     */
    @JvmName("dskxdkxijtdgoyls")
    public suspend fun folder(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.folder = mapped
    }

    /**
     * @param value Specifies the name of the Data Factory Flowlet Data Flow. Changing this forces a new resource to be created.
     */
    @JvmName("dxlsyjtbecepdtew")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value The script for the Data Factory Flowlet Data Flow.
     */
    @JvmName("lkrmvtrpmpucweor")
    public suspend fun script(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.script = mapped
    }

    /**
     * @param value The script lines for the Data Factory Flowlet Data Flow.
     */
    @JvmName("tevbodvinsggtbba")
    public suspend fun scriptLines(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scriptLines = mapped
    }

    /**
     * @param values The script lines for the Data Factory Flowlet Data Flow.
     */
    @JvmName("vblvvjoujfanbdid")
    public suspend fun scriptLines(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.scriptLines = mapped
    }

    /**
     * @param value One or more `sink` blocks as defined below.
     */
    @JvmName("egbdbjhfgjgmmljs")
    public suspend fun sinks(`value`: List<FlowletDataFlowSinkArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sinks = mapped
    }

    /**
     * @param argument One or more `sink` blocks as defined below.
     */
    @JvmName("meyiiwcxkrakwhyj")
    public suspend fun sinks(argument: List<suspend FlowletDataFlowSinkArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowSinkArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.sinks = mapped
    }

    /**
     * @param argument One or more `sink` blocks as defined below.
     */
    @JvmName("vdjclpgsqjwdcxrt")
    public suspend fun sinks(vararg argument: suspend FlowletDataFlowSinkArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowSinkArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.sinks = mapped
    }

    /**
     * @param argument One or more `sink` blocks as defined below.
     */
    @JvmName("ncbhthnldklklxwr")
    public suspend fun sinks(argument: suspend FlowletDataFlowSinkArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(FlowletDataFlowSinkArgsBuilder().applySuspend { argument() }.build())
        val mapped = of(toBeMapped)
        this.sinks = mapped
    }

    /**
     * @param values One or more `sink` blocks as defined below.
     */
    @JvmName("hjyxksfejmlgdgvw")
    public suspend fun sinks(vararg values: FlowletDataFlowSinkArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.sinks = mapped
    }

    /**
     * @param value One or more `source` blocks as defined below.
     */
    @JvmName("wvdbghfkpkpshsxp")
    public suspend fun sources(`value`: List<FlowletDataFlowSourceArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sources = mapped
    }

    /**
     * @param argument One or more `source` blocks as defined below.
     */
    @JvmName("lgndhxofobqwfjfp")
    public suspend fun sources(argument: List<suspend FlowletDataFlowSourceArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowSourceArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.sources = mapped
    }

    /**
     * @param argument One or more `source` blocks as defined below.
     */
    @JvmName("arrrhipdhlfyjmwy")
    public suspend fun sources(vararg argument: suspend FlowletDataFlowSourceArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowSourceArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.sources = mapped
    }

    /**
     * @param argument One or more `source` blocks as defined below.
     */
    @JvmName("lojqdsaxrqcgoblq")
    public suspend fun sources(argument: suspend FlowletDataFlowSourceArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(FlowletDataFlowSourceArgsBuilder().applySuspend { argument() }.build())
        val mapped = of(toBeMapped)
        this.sources = mapped
    }

    /**
     * @param values One or more `source` blocks as defined below.
     */
    @JvmName("yayoyqgqytpptqoh")
    public suspend fun sources(vararg values: FlowletDataFlowSourceArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.sources = mapped
    }

    /**
     * @param value One or more `transformation` blocks as defined below.
     */
    @JvmName("klkwditfwoubyfes")
    public suspend fun transformations(`value`: List<FlowletDataFlowTransformationArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.transformations = mapped
    }

    /**
     * @param argument One or more `transformation` blocks as defined below.
     */
    @JvmName("eaquivwxktcjdrxo")
    public suspend
    fun transformations(argument: List<suspend FlowletDataFlowTransformationArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowTransformationArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.transformations = mapped
    }

    /**
     * @param argument One or more `transformation` blocks as defined below.
     */
    @JvmName("njxefmdwlonvxioh")
    public suspend fun transformations(
        vararg
        argument: suspend FlowletDataFlowTransformationArgsBuilder.() -> Unit,
    ) {
        val toBeMapped = argument.toList().map {
            FlowletDataFlowTransformationArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.transformations = mapped
    }

    /**
     * @param argument One or more `transformation` blocks as defined below.
     */
    @JvmName("mmqcdljcyxeyerpp")
    public suspend
    fun transformations(argument: suspend FlowletDataFlowTransformationArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(
            FlowletDataFlowTransformationArgsBuilder().applySuspend {
                argument()
            }.build(),
        )
        val mapped = of(toBeMapped)
        this.transformations = mapped
    }

    /**
     * @param values One or more `transformation` blocks as defined below.
     */
    @JvmName("vssqovtjxaegutxu")
    public suspend fun transformations(vararg values: FlowletDataFlowTransformationArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.transformations = mapped
    }

    // Assembles the accumulated fields into an immutable args instance.
    internal fun build(): FlowletDataFlowArgs = FlowletDataFlowArgs(
        annotations = annotations,
        dataFactoryId = dataFactoryId,
        description = description,
        folder = folder,
        name = name,
        script = script,
        scriptLines = scriptLines,
        sinks = sinks,
        sources = sources,
        transformations = transformations,
    )
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy