// com.pulumi.azure.datafactory.kotlin.FlowletDataFlowArgs.kt
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azure.datafactory.kotlin
import com.pulumi.azure.datafactory.FlowletDataFlowArgs.builder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSinkArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSinkArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSourceArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowSourceArgsBuilder
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowTransformationArgs
import com.pulumi.azure.datafactory.kotlin.inputs.FlowletDataFlowTransformationArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.jvm.JvmName
/**
* Manages a Flowlet Data Flow inside an Azure Data Factory.
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
* const example = new azure.core.ResourceGroup("example", {
* name: "example-resources",
* location: "West Europe",
* });
* const exampleAccount = new azure.storage.Account("example", {
* name: "example",
* location: example.location,
* resourceGroupName: example.name,
* accountTier: "Standard",
* accountReplicationType: "LRS",
* });
* const exampleFactory = new azure.datafactory.Factory("example", {
* name: "example",
* location: example.location,
* resourceGroupName: example.name,
* });
* const exampleLinkedCustomService = new azure.datafactory.LinkedCustomService("example", {
* name: "linked_service",
* dataFactoryId: exampleFactory.id,
* type: "AzureBlobStorage",
* typePropertiesJson: pulumi.interpolate`{
* "connectionString": "${exampleAccount.primaryConnectionString}"
* }
* `,
* });
* const example1 = new azure.datafactory.DatasetJson("example1", {
* name: "dataset1",
* dataFactoryId: exampleFactory.id,
* linkedServiceName: exampleLinkedCustomService.name,
* azureBlobStorageLocation: {
* container: "container",
* path: "foo/bar/",
* filename: "foo.txt",
* },
* encoding: "UTF-8",
* });
* const example2 = new azure.datafactory.DatasetJson("example2", {
* name: "dataset2",
* dataFactoryId: exampleFactory.id,
* linkedServiceName: exampleLinkedCustomService.name,
* azureBlobStorageLocation: {
* container: "container",
* path: "foo/bar/",
* filename: "bar.txt",
* },
* encoding: "UTF-8",
* });
* const example1FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example1", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* const example2FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example2", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* const exampleFlowletDataFlow = new azure.datafactory.FlowletDataFlow("example", {
* name: "example",
* dataFactoryId: exampleFactory.id,
* sources: [{
* name: "source1",
* flowlet: {
* name: example1FlowletDataFlow.name,
* },
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* sinks: [{
* name: "sink1",
* flowlet: {
* name: example2FlowletDataFlow.name,
* },
* linkedService: {
* name: exampleLinkedCustomService.name,
* },
* }],
* script: `source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `,
* });
* ```
* ```python
* import pulumi
* import pulumi_azure as azure
* example = azure.core.ResourceGroup("example",
* name="example-resources",
* location="West Europe")
* example_account = azure.storage.Account("example",
* name="example",
* location=example.location,
* resource_group_name=example.name,
* account_tier="Standard",
* account_replication_type="LRS")
* example_factory = azure.datafactory.Factory("example",
* name="example",
* location=example.location,
* resource_group_name=example.name)
* example_linked_custom_service = azure.datafactory.LinkedCustomService("example",
* name="linked_service",
* data_factory_id=example_factory.id,
* type="AzureBlobStorage",
* type_properties_json=example_account.primary_connection_string.apply(lambda primary_connection_string: f"""{{
* "connectionString": "{primary_connection_string}"
* }}
* """))
* example1 = azure.datafactory.DatasetJson("example1",
* name="dataset1",
* data_factory_id=example_factory.id,
* linked_service_name=example_linked_custom_service.name,
* azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
* container="container",
* path="foo/bar/",
* filename="foo.txt",
* ),
* encoding="UTF-8")
* example2 = azure.datafactory.DatasetJson("example2",
* name="dataset2",
* data_factory_id=example_factory.id,
* linked_service_name=example_linked_custom_service.name,
* azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
* container="container",
* path="foo/bar/",
* filename="bar.txt",
* ),
* encoding="UTF-8")
* example1_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example1",
* name="example",
* data_factory_id=example_factory.id,
* sources=[azure.datafactory.FlowletDataFlowSourceArgs(
* name="source1",
* linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
* name="sink1",
* linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* example2_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example2",
* name="example",
* data_factory_id=example_factory.id,
* sources=[azure.datafactory.FlowletDataFlowSourceArgs(
* name="source1",
* linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
* name="sink1",
* linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* example_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example",
* name="example",
* data_factory_id=example_factory.id,
* sources=[azure.datafactory.FlowletDataFlowSourceArgs(
* name="source1",
* flowlet=azure.datafactory.FlowletDataFlowSourceFlowletArgs(
* name=example1_flowlet_data_flow.name,
* ),
* linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
* name="sink1",
* flowlet=azure.datafactory.FlowletDataFlowSinkFlowletArgs(
* name=example2_flowlet_data_flow.name,
* ),
* linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
* name=example_linked_custom_service.name,
* ),
* )],
* script="""source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Azure = Pulumi.Azure;
* return await Deployment.RunAsync(() =>
* {
* var example = new Azure.Core.ResourceGroup("example", new()
* {
* Name = "example-resources",
* Location = "West Europe",
* });
* var exampleAccount = new Azure.Storage.Account("example", new()
* {
* Name = "example",
* Location = example.Location,
* ResourceGroupName = example.Name,
* AccountTier = "Standard",
* AccountReplicationType = "LRS",
* });
* var exampleFactory = new Azure.DataFactory.Factory("example", new()
* {
* Name = "example",
* Location = example.Location,
* ResourceGroupName = example.Name,
* });
* var exampleLinkedCustomService = new Azure.DataFactory.LinkedCustomService("example", new()
* {
* Name = "linked_service",
* DataFactoryId = exampleFactory.Id,
* Type = "AzureBlobStorage",
* TypePropertiesJson = exampleAccount.PrimaryConnectionString.Apply(primaryConnectionString => @$"{{
* ""connectionString"": ""{primaryConnectionString}""
* }}
* "),
* });
* var example1 = new Azure.DataFactory.DatasetJson("example1", new()
* {
* Name = "dataset1",
* DataFactoryId = exampleFactory.Id,
* LinkedServiceName = exampleLinkedCustomService.Name,
* AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
* {
* Container = "container",
* Path = "foo/bar/",
* Filename = "foo.txt",
* },
* Encoding = "UTF-8",
* });
* var example2 = new Azure.DataFactory.DatasetJson("example2", new()
* {
* Name = "dataset2",
* DataFactoryId = exampleFactory.Id,
* LinkedServiceName = exampleLinkedCustomService.Name,
* AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
* {
* Container = "container",
* Path = "foo/bar/",
* Filename = "bar.txt",
* },
* Encoding = "UTF-8",
* });
* var example1FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example1", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* var example2FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example2", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* var exampleFlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example", new()
* {
* Name = "example",
* DataFactoryId = exampleFactory.Id,
* Sources = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
* {
* Name = "source1",
* Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSourceFlowletArgs
* {
* Name = example1FlowletDataFlow.Name,
* },
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks = new[]
* {
* new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
* {
* Name = "sink1",
* Flowlet = new Azure.DataFactory.Inputs.FlowletDataFlowSinkFlowletArgs
* {
* Name = example2FlowletDataFlow.Name,
* },
* LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
* {
* Name = exampleLinkedCustomService.Name,
* },
* },
* },
* Script = @"source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* ",
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
* "github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
* Name: pulumi.String("example-resources"),
* Location: pulumi.String("West Europe"),
* })
* if err != nil {
* return err
* }
* exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
* Name: pulumi.String("example"),
* Location: example.Location,
* ResourceGroupName: example.Name,
* AccountTier: pulumi.String("Standard"),
* AccountReplicationType: pulumi.String("LRS"),
* })
* if err != nil {
* return err
* }
* exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
* Name: pulumi.String("example"),
* Location: example.Location,
* ResourceGroupName: example.Name,
* })
* if err != nil {
* return err
* }
* exampleLinkedCustomService, err := datafactory.NewLinkedCustomService(ctx, "example", &datafactory.LinkedCustomServiceArgs{
* Name: pulumi.String("linked_service"),
* DataFactoryId: exampleFactory.ID(),
* Type: pulumi.String("AzureBlobStorage"),
* TypePropertiesJson: exampleAccount.PrimaryConnectionString.ApplyT(func(primaryConnectionString string) (string, error) {
* return fmt.Sprintf("{\n \"connectionString\": \"%v\"\n}\n", primaryConnectionString), nil
* }).(pulumi.StringOutput),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewDatasetJson(ctx, "example1", &datafactory.DatasetJsonArgs{
* Name: pulumi.String("dataset1"),
* DataFactoryId: exampleFactory.ID(),
* LinkedServiceName: exampleLinkedCustomService.Name,
* AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
* Container: pulumi.String("container"),
* Path: pulumi.String("foo/bar/"),
* Filename: pulumi.String("foo.txt"),
* },
* Encoding: pulumi.String("UTF-8"),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewDatasetJson(ctx, "example2", &datafactory.DatasetJsonArgs{
* Name: pulumi.String("dataset2"),
* DataFactoryId: exampleFactory.ID(),
* LinkedServiceName: exampleLinkedCustomService.Name,
* AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
* Container: pulumi.String("container"),
* Path: pulumi.String("foo/bar/"),
* Filename: pulumi.String("bar.txt"),
* },
* Encoding: pulumi.String("UTF-8"),
* })
* if err != nil {
* return err
* }
* example1FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example1", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* example2FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example2", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* _, err = datafactory.NewFlowletDataFlow(ctx, "example", &datafactory.FlowletDataFlowArgs{
* Name: pulumi.String("example"),
* DataFactoryId: exampleFactory.ID(),
* Sources: datafactory.FlowletDataFlowSourceArray{
* &datafactory.FlowletDataFlowSourceArgs{
* Name: pulumi.String("source1"),
* Flowlet: &datafactory.FlowletDataFlowSourceFlowletArgs{
* Name: example1FlowletDataFlow.Name,
* },
* LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Sinks: datafactory.FlowletDataFlowSinkArray{
* &datafactory.FlowletDataFlowSinkArgs{
* Name: pulumi.String("sink1"),
* Flowlet: &datafactory.FlowletDataFlowSinkFlowletArgs{
* Name: example2FlowletDataFlow.Name,
* },
* LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
* Name: exampleLinkedCustomService.Name,
* },
* },
* },
* Script: pulumi.String(`source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* `),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.core.ResourceGroup;
* import com.pulumi.azure.core.ResourceGroupArgs;
* import com.pulumi.azure.storage.Account;
* import com.pulumi.azure.storage.AccountArgs;
* import com.pulumi.azure.datafactory.Factory;
* import com.pulumi.azure.datafactory.FactoryArgs;
* import com.pulumi.azure.datafactory.LinkedCustomService;
* import com.pulumi.azure.datafactory.LinkedCustomServiceArgs;
* import com.pulumi.azure.datafactory.DatasetJson;
* import com.pulumi.azure.datafactory.DatasetJsonArgs;
* import com.pulumi.azure.datafactory.inputs.DatasetJsonAzureBlobStorageLocationArgs;
* import com.pulumi.azure.datafactory.FlowletDataFlow;
* import com.pulumi.azure.datafactory.FlowletDataFlowArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceLinkedServiceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkLinkedServiceArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceFlowletArgs;
* import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkFlowletArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var example = new ResourceGroup("example", ResourceGroupArgs.builder()
* .name("example-resources")
* .location("West Europe")
* .build());
* var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
* .name("example")
* .location(example.location())
* .resourceGroupName(example.name())
* .accountTier("Standard")
* .accountReplicationType("LRS")
* .build());
* var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
* .name("example")
* .location(example.location())
* .resourceGroupName(example.name())
* .build());
* var exampleLinkedCustomService = new LinkedCustomService("exampleLinkedCustomService", LinkedCustomServiceArgs.builder()
* .name("linked_service")
* .dataFactoryId(exampleFactory.id())
* .type("AzureBlobStorage")
 * .typePropertiesJson(exampleAccount.primaryConnectionString().applyValue(primaryConnectionString -> String.format("""
 * {
 *   "connectionString": "%s"
 * }
 * """, primaryConnectionString)))
* .build());
* var example1 = new DatasetJson("example1", DatasetJsonArgs.builder()
* .name("dataset1")
* .dataFactoryId(exampleFactory.id())
* .linkedServiceName(exampleLinkedCustomService.name())
* .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
* .container("container")
* .path("foo/bar/")
* .filename("foo.txt")
* .build())
* .encoding("UTF-8")
* .build());
* var example2 = new DatasetJson("example2", DatasetJsonArgs.builder()
* .name("dataset2")
* .dataFactoryId(exampleFactory.id())
* .linkedServiceName(exampleLinkedCustomService.name())
* .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
* .container("container")
* .path("foo/bar/")
* .filename("bar.txt")
* .build())
* .encoding("UTF-8")
* .build());
* var example1FlowletDataFlow = new FlowletDataFlow("example1FlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* var example2FlowletDataFlow = new FlowletDataFlow("example2FlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* var exampleFlowletDataFlow = new FlowletDataFlow("exampleFlowletDataFlow", FlowletDataFlowArgs.builder()
* .name("example")
* .dataFactoryId(exampleFactory.id())
* .sources(FlowletDataFlowSourceArgs.builder()
* .name("source1")
* .flowlet(FlowletDataFlowSourceFlowletArgs.builder()
* .name(example1FlowletDataFlow.name())
* .build())
* .linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .sinks(FlowletDataFlowSinkArgs.builder()
* .name("sink1")
* .flowlet(FlowletDataFlowSinkFlowletArgs.builder()
* .name(example2FlowletDataFlow.name())
* .build())
* .linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
* .name(exampleLinkedCustomService.name())
* .build())
* .build())
* .script("""
* source(
* allowSchemaDrift: true,
* validateSchema: false,
* limit: 100,
* ignoreNoFilesFound: false,
* documentForm: 'documentPerLine') ~> source1
* source1 sink(
* allowSchemaDrift: true,
* validateSchema: false,
* skipDuplicateMapInputs: true,
* skipDuplicateMapOutputs: true) ~> sink1
* """)
* .build());
* }
* }
* ```
* ```yaml
 * resources:
 *   example:
 *     type: azure:core:ResourceGroup
 *     properties:
 *       name: example-resources
 *       location: West Europe
 *   exampleAccount:
 *     type: azure:storage:Account
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *       accountTier: Standard
 *       accountReplicationType: LRS
 *   exampleFactory:
 *     type: azure:datafactory:Factory
 *     name: example
 *     properties:
 *       name: example
 *       location: ${example.location}
 *       resourceGroupName: ${example.name}
 *   exampleLinkedCustomService:
 *     type: azure:datafactory:LinkedCustomService
 *     name: example
 *     properties:
 *       name: linked_service
 *       dataFactoryId: ${exampleFactory.id}
 *       type: AzureBlobStorage
 *       typePropertiesJson: |
 *         {
 *           "connectionString": "${exampleAccount.primaryConnectionString}"
 *         }
 *   example1:
 *     type: azure:datafactory:DatasetJson
 *     properties:
 *       name: dataset1
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedCustomService.name}
 *       azureBlobStorageLocation:
 *         container: container
 *         path: foo/bar/
 *         filename: foo.txt
 *       encoding: UTF-8
 *   example2:
 *     type: azure:datafactory:DatasetJson
 *     properties:
 *       name: dataset2
 *       dataFactoryId: ${exampleFactory.id}
 *       linkedServiceName: ${exampleLinkedCustomService.name}
 *       azureBlobStorageLocation:
 *         container: container
 *         path: foo/bar/
 *         filename: bar.txt
 *       encoding: UTF-8
 *   exampleFlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           flowlet:
 *             name: ${example1FlowletDataFlow.name}
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           flowlet:
 *             name: ${example2FlowletDataFlow.name}
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
 *   example1FlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example1
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
 *   example2FlowletDataFlow:
 *     type: azure:datafactory:FlowletDataFlow
 *     name: example2
 *     properties:
 *       name: example
 *       dataFactoryId: ${exampleFactory.id}
 *       sources:
 *         - name: source1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       sinks:
 *         - name: sink1
 *           linkedService:
 *             name: ${exampleLinkedCustomService.name}
 *       script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
* ```
*
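 * For reference, a minimal Kotlin sketch of constructing these arguments directly via the
 * `FlowletDataFlowArgs` data class declared below. This sketch is an assumption rather than one of
 * the upstream examples; `factoryId` is a hypothetical placeholder for the `id` output of an
 * existing Data Factory, and all omitted properties keep their `null` defaults:
 * ```kotlin
 * import com.pulumi.core.Output
 *
 * // Hypothetical helper: builds FlowletDataFlowArgs with only a name, factory ID, and script.
 * fun exampleFlowletArgs(factoryId: Output<String>): FlowletDataFlowArgs =
 *     FlowletDataFlowArgs(
 *         name = Output.of("example"),
 *         dataFactoryId = factoryId,
 *         script = Output.of(
 *             """
 *             source(
 *               allowSchemaDrift: true,
 *               validateSchema: false) ~> source1
 *             source1 sink(
 *               allowSchemaDrift: true,
 *               validateSchema: false) ~> sink1
 *             """.trimIndent(),
 *         ),
 *     )
 * ```
 *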
* ## Import
 * A Data Factory Flowlet Data Flow can be imported using the `resource id`, e.g.
* ```sh
* $ pulumi import azure:datafactory/flowletDataFlow:FlowletDataFlow example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example
* ```
* @property annotations List of tags that can be used for describing the Data Factory Flowlet Data Flow.
 * @property dataFactoryId The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource to be created.
* @property description The description for the Data Factory Flowlet Data Flow.
* @property folder The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
* @property name Specifies the name of the Data Factory Flowlet Data Flow. Changing this forces a new resource to be created.
* @property script The script for the Data Factory Flowlet Data Flow.
* @property scriptLines The script lines for the Data Factory Flowlet Data Flow.
* @property sinks One or more `sink` blocks as defined below.
* @property sources One or more `source` blocks as defined below.
* @property transformations One or more `transformation` blocks as defined below.
*/
public data class FlowletDataFlowArgs(
    public val annotations: Output<List<String>>? = null,
    public val dataFactoryId: Output<String>? = null,
    public val description: Output<String>? = null,
    public val folder: Output<String>? = null,
    public val name: Output<String>? = null,
    public val script: Output<String>? = null,
    public val scriptLines: Output<List<String>>? = null,
    public val sinks: Output<List<FlowletDataFlowSinkArgs>>? = null,
    public val sources: Output<List<FlowletDataFlowSourceArgs>>? = null,
    public val transformations: Output<List<FlowletDataFlowTransformationArgs>>? = null,
) : ConvertibleToJava<com.pulumi.azure.datafactory.FlowletDataFlowArgs> {
    override fun toJava(): com.pulumi.azure.datafactory.FlowletDataFlowArgs =
        com.pulumi.azure.datafactory.FlowletDataFlowArgs.builder()
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .dataFactoryId(dataFactoryId?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .folder(folder?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .script(script?.applyValue({ args0 -> args0 }))
            .scriptLines(scriptLines?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .sinks(sinks?.applyValue({ args0 -> args0.map({ args0 -> args0.toJava() }) }))
            .sources(sources?.applyValue({ args0 -> args0.map({ args0 -> args0.toJava() }) }))
            .transformations(transformations?.applyValue({ args0 -> args0.map({ args0 -> args0.toJava() }) }))
            .build()
}
/**
* Builder for [FlowletDataFlowArgs].
*/
@PulumiTagMarker
public class FlowletDataFlowArgsBuilder internal constructor() {
    private var annotations: Output<List<String>>? = null
    private var dataFactoryId: Output<String>? = null
    private var description: Output<String>? = null
    private var folder: Output<String>? = null
    private var name: Output<String>? = null
    private var script: Output<String>? = null
    private var scriptLines: Output<List<String>>? = null
    private var sinks: Output<List<FlowletDataFlowSinkArgs>>? = null
    private var sources: Output<List<FlowletDataFlowSourceArgs>>? = null
    private var transformations: Output<List<FlowletDataFlowTransformationArgs>>? = null
    /**
     * @param value List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("gooiueolmaljryuu")
    public suspend fun annotations(`value`: Output<List<String>>) {
        this.annotations = value
    }

    @JvmName("nkpqhvbmhsiboixk")
    public suspend fun annotations(vararg values: Output<String>) {
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the Data Factory Flowlet Data Flow.
     */
    @JvmName("eqllemxlefyrecrw")
    public suspend fun annotations(values: List<Output<String>>) {
        this.annotations = Output.all(values)
    }