
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azure.datafactory.kotlin
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowSink
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowSource
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowTransformation
import com.pulumi.core.Output
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Boolean
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowSink.Companion.toKotlin as flowletDataFlowSinkToKotlin
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowSource.Companion.toKotlin as flowletDataFlowSourceToKotlin
import com.pulumi.azure.datafactory.kotlin.outputs.FlowletDataFlowTransformation.Companion.toKotlin as flowletDataFlowTransformationToKotlin
/**
* Builder for [FlowletDataFlow].
*/
@PulumiTagMarker
public class FlowletDataFlowResourceBuilder internal constructor() {
public var name: String? = null
public var args: FlowletDataFlowArgs = FlowletDataFlowArgs()
public var opts: CustomResourceOptions = CustomResourceOptions()
/**
* @param name The _unique_ name of the resulting resource.
*/
public fun name(`value`: String) {
this.name = value
}
/**
* @param block The arguments to use to populate this resource's properties.
*/
public suspend fun args(block: suspend FlowletDataFlowArgsBuilder.() -> Unit) {
val builder = FlowletDataFlowArgsBuilder()
block(builder)
this.args = builder.build()
}
/**
* @param block A bag of options that control this resource's behavior.
*/
public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
this.opts = com.pulumi.kotlin.options.CustomResourceOptions.opts(block)
}
internal fun build(): FlowletDataFlow {
val builtJavaResource = com.pulumi.azure.datafactory.FlowletDataFlow(
this.name,
this.args.toJava(),
this.opts.toJava(),
)
return FlowletDataFlow(builtJavaResource)
}
}
/**
* Manages a Flowlet Data Flow inside an Azure Data Factory.
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
* const example = new azure.core.ResourceGroup("example", {
* name: "example-resources",
* location: "West Europe",
* });
* const exampleAccount = new azure.storage.Account("example", {
* name: "example",
* location: example.location,
* resourceGroupName: example.name,
* accountTier: "Standard",
* accountReplicationType: "LRS",
* });
* const exampleFactory = new azure.datafactory.Factory("example", {
* name: "example",
* location: example.location,
* resourceGroupName: example.name,
* });
* const exampleLinkedCustomService = new azure.datafactory.LinkedCustomService("example", {
* name: "linked_service",
* dataFactoryId: exampleFactory.id,
* type: "AzureBlobStorage",
* typePropertiesJson: pulumi.interpolate`{
* "connectionString": "${exampleAccount.primaryConnectionString}"
* }
* `,
* });
* const example1 = new azure.datafactory.DatasetJson("example1", {
* name: "dataset1",
* dataFactoryId: exampleFactory.id,
* linkedServiceName: exampleLinkedCustomService.name,
* azureBlobStorageLocation: {
* container: "container",
* path: "foo/bar/",
* filename: "foo.txt",
* },
* encoding: "UTF-8",
* });
* const example2 = new azure.datafactory.DatasetJson("example2", {
* name: "dataset2",
* dataFactoryId: exampleFactory.id,
* linkedServiceName: exampleLinkedCustomService.name,
* azureBlobStorageLocation: {
* container: "container",
* path: "foo/bar/",
* filename: "bar.txt",
* },
* encoding: "UTF-8",
* });
* const example1FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example1", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* const example2FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example2", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* const exampleFlowletDataFlow = new azure.datafactory.FlowletDataFlow("example", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* flowlet: {
* name: example1FlowletDataFlow.name,
* },
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* flowlet: {
* name: example2FlowletDataFlow.name,
* },
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* ```
* ```python
* import pulumi
* import pulumi_azure as azure
* example = azure.core.ResourceGroup("example",
* name="example-resources",
* location="West Europe")
* example_account = azure.storage.Account("example",
* name="example",
* location=example.location,
* resource_group_name=example.name,
* account_tier="Standard",
* account_replication_type="LRS")
* example_factory = azure.datafactory.Factory("example",
* name="example",
* location=example.location,
* resource_group_name=example.name)
* example_linked_custom_service = azure.datafactory.LinkedCustomService("example",
* name="linked_service",
* data_factory_id=example_factory.id,
* type="AzureBlobStorage",
* type_properties_json=example_account.primary_connection_string.apply(lambda primary_connection_string: f"""{{
* "connectionString": "{primary_connection_string}"
* }}
* """))
* example1 = azure.datafactory.DatasetJson("example1",
* name="dataset1",
* data_factory_id=example_factory.id,
* linked_service_name=example_linked_custom_service.name,
* azure_blob_storage_location={
* "container": "container",
* "path": "foo/bar/",
* "filename": "foo.txt",
* },
* encoding="UTF-8")
* example2 = azure.datafactory.DatasetJson("example2",
* name="dataset2",
* data_factory_id=example_factory.id,
* linked_service_name=example_linked_custom_service.name,
* azure_blob_storage_location={
* "container": "container",
* "path": "foo/bar/",
* "filename": "bar.txt",
* },
* encoding="UTF-8")
* example1_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example1",
* name="example",
* data_factory_id=example_factory.id,
* sources=[{
* "name": "source1",
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* sinks=[{
* "name": "sink1",
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* example2_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example2",
* name="example",
* data_factory_id=example_factory.id,
* sources=[{
* "name": "source1",
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* sinks=[{
* "name": "sink1",
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* example_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example",
* name="example",
* data_factory_id=example_factory.id,
* sources=[{
* "name": "source1",
* "flowlet": {
* "name": example1_flowlet_data_flow.name,
* },
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* sinks=[{
* "name": "sink1",
* "flowlet": {
* "name": example2_flowlet_data_flow.name,
* },
* "linked_service": {
* "name": example_linked_custom_service.name,
* },
* }],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Azure = Pulumi.Azure;
* return await Deployment.RunAsync(() =>
* {
* var example = new Azure.Core.ResourceGroup("example", new()
* {
* Name = "example-resources",
* Location = "West Europe",
* });
* var exampleAccount = new Azure.Storage.Account("example", new()
* {
* Name = "example",
* Location = example.Location,
* ResourceGroupName = example.Name,
* AccountTier = "Standard",
* AccountReplicationType = "LRS",
* });
* var exampleFactory = new Azure.DataFactory.Factory("example", new()
* {
* Name = "example",
* Location = example.Location,
* ResourceGroupName = example.Name,
* });
* var exampleLinkedCustomService = new Azure.DataFactory.LinkedCustomService("example", new()
* {
* Name = "linked_service",
* DataFactoryId = exampleFactory.Id,
* Type = "AzureBlobStorage",
* TypePropertiesJson = exampleAccount.PrimaryConnectionString.Apply(primaryConnectionString => @$"{{
* ""connectionString"": ""{primaryConnectionString}""
* }}
* "),
* });
* var example1 = new Azure.DataFactory.DatasetJson("example1", new()
* {
* Name = "dataset1",
* DataFactoryId = exampleFactory.Id,
* LinkedServiceName = exampleLinkedCustomService.Name,
* AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
* {
* Container = "container",
* Path = "foo/bar/",
* Filename = "foo.txt",
* },
* Encoding = "UTF-8",
* });
* var example2 = new Azure.DataFactory.DatasetJson("example2", new()
* {
* Name = "dataset2",
* DataFactoryId = exampleFactory.Id,
* LinkedServiceName = exampleLinkedCustomService.Name,
* AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
* {
* Container = "container",
* Path = "foo/bar/",
* Filename = "bar.txt",
* },
* Encoding = "UTF-8",
* });
* var example1FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example1", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* var example2FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example2", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* var exampleFlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSourceFlowletArgs
* {
* Name = example1FlowletDataFlow.Name,
* },
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSinkFlowletArgs
* {
* Name = example2FlowletDataFlow.Name,
* },
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
* Name: pulumi.String("example-resources"),
* Location: pulumi.String("West Europe"),
* })
* if err != nil {
* return err
* }
* exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
* Name: pulumi.String("example"),
* Location: example.Location,
* ResourceGroupName: example.Name,
* AccountTier: pulumi.String("Standard"),
* AccountReplicationType: pulumi.String("LRS"),
* })
* if err != nil {
* return err
* }
* exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
* Name: pulumi.String("example"),
* Location: example.Location,
* ResourceGroupName: example.Name,
* })
* if err != nil {
* return err
* }
* exampleLinkedCustomService, err := datafactory.NewLinkedCustomService(ctx, "example", &datafactory.LinkedCustomServiceArgs{
* Name: pulumi.String("linked_service"),
* DataFactoryId: exampleFactory.ID(),
* Type: pulumi.String("AzureBlobStorage"),
* TypePropertiesJson: exampleAccount.PrimaryConnectionString.ApplyT(func(primaryConnectionString string) (string, error) {
* return fmt.Sprintf("{\n \"connectionString\": \"%v\"\n}\n", primaryConnectionString), nil
* }).(pulumi.StringOutput),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewDatasetJson(ctx, "example1", &datafactory.DatasetJsonArgs{
* Name: pulumi.String("dataset1"),
* DataFactoryId: exampleFactory.ID(),
* LinkedServiceName: exampleLinkedCustomService.Name,
* AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
* Container: pulumi.String("container"),
* Path: pulumi.String("foo/bar/"),
* Filename: pulumi.String("foo.txt"),
* },
* Encoding: pulumi.String("UTF-8"),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewDatasetJson(ctx, "example2", &datafactory.DatasetJsonArgs{
* Name: pulumi.String("dataset2"),
* DataFactoryId: exampleFactory.ID(),
* LinkedServiceName: exampleLinkedCustomService.Name,
* AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
* Container: pulumi.String("container"),
* Path: pulumi.String("foo/bar/"),
* Filename: pulumi.String("bar.txt"),
* },
* Encoding: pulumi.String("UTF-8"),
* })
* if err != nil {
* return err
* }
* example1FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example1", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* example2FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example2", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewFlowletDataFlow(ctx, "example", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* Flowlet: &datafactory.FlowletDataFlowSourceFlowletArgs{
* Name: example1FlowletDataFlow.Name,
* },
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* Flowlet: &datafactory.FlowletDataFlowSinkFlowletArgs{
* Name: example2FlowletDataFlow.Name,
* },
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.core.ResourceGroup;
* import com.pulumi.azure.core.ResourceGroupArgs;
* import com.pulumi.azure.storage.Account;
* import com.pulumi.azure.storage.AccountArgs;
* import com.pulumi.azure.datafactory.Factory;
* import com.pulumi.azure.datafactory.FactoryArgs;
* import com.pulumi.azure.datafactory.LinkedCustomService;
* import com.pulumi.azure.datafactory.LinkedCustomServiceArgs;
* import com.pulumi.azure.datafactory.DatasetJson;
* import com.pulumi.azure.datafactory.DatasetJsonArgs;
* import com.pulumi.azure.datafactory.inputs.DatasetJsonAzureBlobStorageLocationArgs;
* import com.pulumi.azure.datafactory.FlowletDataFlow;
* import com.pulumi.azure.datafactory.FlowletDataFlowArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceLinkedServiceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkLinkedServiceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceFlowletArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkFlowletArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var example = new ResourceGroup("example", ResourceGroupArgs.builder()
* .name("example-resources")
* .location("West Europe")
* .build());
* var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
* .name("example")
* .location(example.location())
* .resourceGroupName(example.name())
* .accountTier("Standard")
* .accountReplicationType("LRS")
* .build());
* var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
* .name("example")
* .location(example.location())
* .resourceGroupName(example.name())
* .build());
* var exampleLinkedCustomService = new LinkedCustomService("exampleLinkedCustomService", LinkedCustomServiceArgs.builder()
* .name("linked_service")
* .dataFactoryId(exampleFactory.id())
* .type("AzureBlobStorage")
* .typePropertiesJson(exampleAccount.primaryConnectionString().applyValue(primaryConnectionString -> """
* {
*     "connectionString": "%s"
* }
* """.formatted(primaryConnectionString)))
* .build());
* var example1 = new DatasetJson("example1", DatasetJsonArgs.builder()
* .name("dataset1")
* .dataFactoryId(exampleFactory.id())
* .linkedServiceName(exampleLinkedCustomService.name())
* .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
* .container("container")
* .path("foo/bar/")
* .filename("foo.txt")
* .build())
* .encoding("UTF-8")
* .build());
* var example2 = new DatasetJson("example2", DatasetJsonArgs.builder()
* .name("dataset2")
* .dataFactoryId(exampleFactory.id())
* .linkedServiceName(exampleLinkedCustomService.name())
* .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
* .container("container")
* .path("foo/bar/")
* .filename("bar.txt")
* .build())
* .encoding("UTF-8")
* .build());
* var example1FlowletDataFlow = new FlowletDataFlow("example1FlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* var example2FlowletDataFlow = new FlowletDataFlow("example2FlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* var exampleFlowletDataFlow = new FlowletDataFlow("exampleFlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .flowlet(FlowletDataFlowSourceFlowletArgs.builder()
* .name(example1FlowletDataFlow.name())
* .build())
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .flowlet(FlowletDataFlowSinkFlowletArgs.builder()
* .name(example2FlowletDataFlow.name())
* .build())
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* }
* }
* ```
* ```yaml
* resources:
* example:
* type: azure:core:ResourceGroup
* properties:
* name: example-resources
* location: West Europe
* exampleAccount:
* type: azure:storage:Account
* name: example
* properties:
* name: example
* location: ${example.location}
* resourceGroupName: ${example.name}
* accountTier: Standard
* accountReplicationType: LRS
* exampleFactory:
* type: azure:datafactory:Factory
* name: example
* properties:
* name: example
* location: ${example.location}
* resourceGroupName: ${example.name}
* exampleLinkedCustomService:
* type: azure:datafactory:LinkedCustomService
* name: example
* properties:
* name: linked_service
* dataFactoryId: ${exampleFactory.id}
* type: AzureBlobStorage
* typePropertiesJson: |
* {
* "connectionString": "${exampleAccount.primaryConnectionString}"
* }
* example1:
* type: azure:datafactory:DatasetJson
* properties:
* name: dataset1
* dataFactoryId: ${exampleFactory.id}
* linkedServiceName: ${exampleLinkedCustomService.name}
* azureBlobStorageLocation:
* container: container
* path: foo/bar/
* filename: foo.txt
* encoding: UTF-8
* example2:
* type: azure:datafactory:DatasetJson
* properties:
* name: dataset2
* dataFactoryId: ${exampleFactory.id}
* linkedServiceName: ${exampleLinkedCustomService.name}
* azureBlobStorageLocation:
* container: container
* path: foo/bar/
* filename: bar.txt
* encoding: UTF-8
* exampleFlowletDataFlow:
* type: azure:datafactory:FlowletDataFlow
* name: example
* properties:
* name: example
* dataFactoryId: ${exampleFactory.id}
* sources:
* - name: source1
* flowlet:
* name: ${example1FlowletDataFlow.name}
* linkedService:
* name: ${exampleLinkedCustomService.name}
* sinks:
* - name: sink1
* flowlet:
* name: ${example2FlowletDataFlow.name}
* linkedService:
* name: ${exampleLinkedCustomService.name}
* script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
* example1FlowletDataFlow:
* type: azure:datafactory:FlowletDataFlow
* name: example1
* properties:
* name: example
* dataFactoryId: ${exampleFactory.id}
* sources:
* - name: source1
* linkedService:
* name: ${exampleLinkedCustomService.name}
* sinks:
* - name: sink1
* linkedService:
* name: ${exampleLinkedCustomService.name}
* script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
* example2FlowletDataFlow:
* type: azure:datafactory:FlowletDataFlow
* name: example2
* properties:
* name: example
* dataFactoryId: ${exampleFactory.id}
* sources:
* - name: source1
* linkedService:
* name: ${exampleLinkedCustomService.name}
* sinks:
* - name: sink1
* linkedService:
* name: ${exampleLinkedCustomService.name}
* script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
* ```
*
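* Since this file is the Kotlin SDK wrapper, the same resource can also be declared with the
* `flowletDataFlow` builder defined below. The following is a minimal sketch, assuming the usual
* pulumi-kotlin entry point (`Pulumi.run`) and that `FlowletDataFlowArgsBuilder` exposes setters
* named after the resource properties (`name`, `dataFactoryId`, `script`, ...); it is illustrative
* rather than a verbatim program.
* ```kotlin
* import com.pulumi.azure.datafactory.kotlin.flowletDataFlow
* import com.pulumi.kotlin.Pulumi
*
* fun main() {
*     Pulumi.run { ctx ->
*         // Minimal flowlet; `sources`, `sinks` and linked services would be added through their
*         // builder blocks in the same way as in the examples above.
*         val exampleFlowlet = flowletDataFlow("example") {
*             args {
*                 name("example")
*                 // Normally taken from a Factory resource's `id` output.
*                 dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example")
*                 script(
*                     """
*                     source(
*                       allowSchemaDrift: true,
*                       validateSchema: false,
*                       documentForm: 'documentPerLine') ~> source1
*                     source1 sink(
*                       allowSchemaDrift: true,
*                       validateSchema: false) ~> sink1
*                     """.trimIndent()
*                 )
*             }
*         }
*         ctx.export("flowletName", exampleFlowlet.name)
*     }
* }
* ```
*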
* ## Import
* A Data Factory Flowlet Data Flow can be imported using the `resource id`, e.g.
* ```sh
* $ pulumi import azure:datafactory/flowletDataFlow:FlowletDataFlow example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example
* ```
*/
public class FlowletDataFlow internal constructor(
override val javaResource: com.pulumi.azure.datafactory.FlowletDataFlow,
) : KotlinCustomResource(javaResource, FlowletDataFlowMapper) {
/**
* List of tags that can be used for describing the Data Factory Flowlet Data Flow.
*/
public val annotations: Output<List<String>>?
get() = javaResource.annotations().applyValue({ args0 ->
args0.map({ args0 ->
args0.map({ args0 ->
args0
})
}).orElse(null)
})
/**
* The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.
*/
public val dataFactoryId: Output<String>
get() = javaResource.dataFactoryId().applyValue({ args0 -> args0 })
/**
* The description for the Data Factory Flowlet Data Flow.
*/
public val description: Output<String>?
get() = javaResource.description().applyValue({ args0 ->
args0.map({ args0 ->
args0
}).orElse(null)
})
/**
* The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
*/
public val folder: Output<String>?
get() = javaResource.folder().applyValue({ args0 -> args0.map({ args0 -> args0 }).orElse(null) })
/**
* Specifies the name of the Data Factory Flowlet Data Flow. Changing this forces a new resource to be created.
*/
public val name: Output<String>
get() = javaResource.name().applyValue({ args0 -> args0 })
/**
* The script for the Data Factory Flowlet Data Flow.
*/
public val script: Output<String>?
get() = javaResource.script().applyValue({ args0 -> args0.map({ args0 -> args0 }).orElse(null) })
/**
* The script lines for the Data Factory Flowlet Data Flow.
*/
public val scriptLines: Output<List<String>>?
get() = javaResource.scriptLines().applyValue({ args0 ->
args0.map({ args0 ->
args0.map({ args0 ->
args0
})
}).orElse(null)
})
/**
* One or more `sink` blocks as defined below.
*/
public val sinks: Output<List<FlowletDataFlowSink>>?
get() = javaResource.sinks().applyValue({ args0 ->
args0.map({ args0 ->
args0.map({ args0 ->
args0.let({ args0 -> flowletDataFlowSinkToKotlin(args0) })
})
}).orElse(null)
})
/**
* One or more `source` blocks as defined below.
*/
public val sources: Output<List<FlowletDataFlowSource>>?
get() = javaResource.sources().applyValue({ args0 ->
args0.map({ args0 ->
args0.map({ args0 ->
args0.let({ args0 -> flowletDataFlowSourceToKotlin(args0) })
})
}).orElse(null)
})
/**
* One or more `transformation` blocks as defined below.
*/
public val transformations: Output<List<FlowletDataFlowTransformation>>?
get() = javaResource.transformations().applyValue({ args0 ->
args0.map({ args0 ->
args0.map({ args0 ->
args0.let({ args0 ->
flowletDataFlowTransformationToKotlin(args0)
})
})
}).orElse(null)
})
}
public object FlowletDataFlowMapper : ResourceMapper<FlowletDataFlow> {
override fun supportsMappingOfType(javaResource: Resource): Boolean =
com.pulumi.azure.datafactory.FlowletDataFlow::class == javaResource::class
override fun map(javaResource: Resource): FlowletDataFlow = FlowletDataFlow(
javaResource as
com.pulumi.azure.datafactory.FlowletDataFlow,
)
}
/**
* @see [FlowletDataFlow].
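*
* A minimal hypothetical call (the `dataFactoryId` setter name and the option names in the
* comment are assumptions):
* ```kotlin
* val flow = flowletDataFlow("example") {
*     args {
*         // setters on FlowletDataFlowArgsBuilder, assumed to mirror the resource properties
*         dataFactoryId("<data factory resource ID>")
*     }
*     opts {
*         // CustomResourceOptionsBuilder block, e.g. dependsOn / protect
*     }
* }
* ```
*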
* @param name The _unique_ name of the resulting resource.
* @param block Builder for [FlowletDataFlow].
*/
public suspend fun flowletDataFlow(
name: String,
block: suspend FlowletDataFlowResourceBuilder.() -> Unit,
): FlowletDataFlow {
val builder = FlowletDataFlowResourceBuilder()
builder.name(name)
block(builder)
return builder.build()
}
/**
* @see [FlowletDataFlow].
* @param name The _unique_ name of the resulting resource.
*/
public fun flowletDataFlow(name: String): FlowletDataFlow {
val builder = FlowletDataFlowResourceBuilder()
builder.name(name)
return builder.build()
}