@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azurenative.streamanalytics.kotlin
import com.pulumi.azurenative.streamanalytics.OutputArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Any
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.jvm.JvmName
/**
* An output object, containing all information associated with the named output. All outputs are contained under a streaming job.
* Azure REST API version: 2020-03-01. Prior API version in Azure Native 1.x: 2016-03-01.
* Other available API versions: 2021-10-01-preview.
* ## Example Usage
* ### Create a DocumentDB output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.DocumentDbOutputDataSourceArgs
* {
* AccountId = "someAccountId",
* AccountKey = "accountKey==",
* CollectionNamePattern = "collection",
* Database = "db01",
* DocumentId = "documentId",
* PartitionKey = "key",
* Type = "Microsoft.Storage/DocumentDB",
* },
* JobName = "sj2331",
* OutputName = "output3022",
* ResourceGroupName = "sjrg7983",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.DocumentDbOutputDataSourceArgs{
* AccountId: pulumi.String("someAccountId"),
* AccountKey: pulumi.String("accountKey=="),
* CollectionNamePattern: pulumi.String("collection"),
* Database: pulumi.String("db01"),
* DocumentId: pulumi.String("documentId"),
* PartitionKey: pulumi.String("key"),
* Type: pulumi.String("Microsoft.Storage/DocumentDB"),
* },
* JobName: pulumi.String("sj2331"),
* OutputName: pulumi.String("output3022"),
* ResourceGroupName: pulumi.String("sjrg7983"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(DocumentDbOutputDataSourceArgs.builder()
* .accountId("someAccountId")
* .accountKey("accountKey==")
* .collectionNamePattern("collection")
* .database("db01")
* .documentId("documentId")
* .partitionKey("key")
* .type("Microsoft.Storage/DocumentDB")
* .build())
* .jobName("sj2331")
* .outputName("output3022")
* .resourceGroupName("sjrg7983")
* .build());
* }
* }
* ```
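* A Kotlin sketch of the same DocumentDB output follows. It is illustrative only: it assumes the pulumi-kotlin
* `Pulumi.run` entry point, the generated type-safe `output` resource builder with its `args` block (backed by
* the `OutputArgsBuilder` below), and the Java input type under `com.pulumi.azurenative.streamanalytics.inputs`;
* verify these names against your generated SDK version.
* ```kotlin
* import com.pulumi.azurenative.streamanalytics.inputs.DocumentDbOutputDataSourceArgs
* import com.pulumi.azurenative.streamanalytics.kotlin.output
* import com.pulumi.kotlin.Pulumi
* fun main() {
*     Pulumi.run {
*         // Assumed generated resource builder; the args block maps onto OutputArgsBuilder in this file.
*         output("output") {
*             args {
*                 // datasource is typed as Any; the Java input type is passed through unchanged.
*                 datasource(
*                     DocumentDbOutputDataSourceArgs.builder()
*                         .accountId("someAccountId")
*                         .accountKey("accountKey==")
*                         .collectionNamePattern("collection")
*                         .database("db01")
*                         .documentId("documentId")
*                         .partitionKey("key")
*                         .type("Microsoft.Storage/DocumentDB")
*                         .build(),
*                 )
*                 jobName("sj2331")
*                 outputName("output3022")
*                 resourceGroupName("sjrg7983")
*             }
*         }
*     }
* }
* ```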
* ### Create a Gateway Message Bus output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.GatewayMessageBusOutputDataSourceArgs
* {
* Topic = "EdgeTopic1",
* Type = "GatewayMessageBus",
* },
* JobName = "sj2331",
* OutputName = "output3022",
* ResourceGroupName = "sjrg7983",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.GatewayMessageBusOutputDataSourceArgs{
* Topic: pulumi.String("EdgeTopic1"),
* Type: pulumi.String("GatewayMessageBus"),
* },
* JobName: pulumi.String("sj2331"),
* OutputName: pulumi.String("output3022"),
* ResourceGroupName: pulumi.String("sjrg7983"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(GatewayMessageBusOutputDataSourceArgs.builder()
* .topic("EdgeTopic1")
* .type("GatewayMessageBus")
* .build())
* .jobName("sj2331")
* .outputName("output3022")
* .resourceGroupName("sjrg7983")
* .build());
* }
* }
* ```
* ### Create a Power BI output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.PowerBIOutputDataSourceArgs
* {
* Dataset = "someDataset",
* GroupId = "ac40305e-3e8d-43ac-8161-c33799f43e95",
* GroupName = "MyPowerBIGroup",
* RefreshToken = "someRefreshToken==",
* Table = "someTable",
* TokenUserDisplayName = "Bob Smith",
* TokenUserPrincipalName = "[email protected] ",
* Type = "PowerBI",
* },
* JobName = "sj2331",
* OutputName = "output3022",
* ResourceGroupName = "sjrg7983",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.PowerBIOutputDataSourceArgs{
* Dataset: pulumi.String("someDataset"),
* GroupId: pulumi.String("ac40305e-3e8d-43ac-8161-c33799f43e95"),
* GroupName: pulumi.String("MyPowerBIGroup"),
* RefreshToken: pulumi.String("someRefreshToken=="),
* Table: pulumi.String("someTable"),
* TokenUserDisplayName: pulumi.String("Bob Smith"),
* TokenUserPrincipalName: pulumi.String("[email protected] "),
* Type: pulumi.String("PowerBI"),
* },
* JobName: pulumi.String("sj2331"),
* OutputName: pulumi.String("output3022"),
* ResourceGroupName: pulumi.String("sjrg7983"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(PowerBIOutputDataSourceArgs.builder()
* .dataset("someDataset")
* .groupId("ac40305e-3e8d-43ac-8161-c33799f43e95")
* .groupName("MyPowerBIGroup")
* .refreshToken("someRefreshToken==")
* .table("someTable")
* .tokenUserDisplayName("Bob Smith")
* .tokenUserPrincipalName("[email protected] ")
* .type("PowerBI")
* .build())
* .jobName("sj2331")
* .outputName("output3022")
* .resourceGroupName("sjrg7983")
* .build());
* }
* }
* ```
* ### Create a Service Bus Queue output with Avro serialization
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.ServiceBusQueueOutputDataSourceArgs
* {
* PropertyColumns = new[]
* {
* "column1",
* "column2",
* },
* QueueName = "sdkqueue",
* ServiceBusNamespace = "sdktest",
* SharedAccessPolicyKey = "sharedAccessPolicyKey=",
* SharedAccessPolicyName = "RootManageSharedAccessKey",
* SystemPropertyColumns = new Dictionary&lt;string, object?&gt;
* {
* ["MessageId"] = "col3",
* ["PartitionKey"] = "col4",
* },
* Type = "Microsoft.ServiceBus/Queue",
* },
* JobName = "sj5095",
* OutputName = "output3456",
* ResourceGroupName = "sjrg3410",
* Serialization = new AzureNative.StreamAnalytics.Inputs.AvroSerializationArgs
* {
* Type = "Avro",
* },
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.ServiceBusQueueOutputDataSourceArgs{
* PropertyColumns: pulumi.StringArray{
* pulumi.String("column1"),
* pulumi.String("column2"),
* },
* QueueName: pulumi.String("sdkqueue"),
* ServiceBusNamespace: pulumi.String("sdktest"),
* SharedAccessPolicyKey: pulumi.String("sharedAccessPolicyKey="),
* SharedAccessPolicyName: pulumi.String("RootManageSharedAccessKey"),
* SystemPropertyColumns: pulumi.Any(map[string]interface{}{
* "MessageId": "col3",
* "PartitionKey": "col4",
* }),
* Type: pulumi.String("Microsoft.ServiceBus/Queue"),
* },
* JobName: pulumi.String("sj5095"),
* OutputName: pulumi.String("output3456"),
* ResourceGroupName: pulumi.String("sjrg3410"),
* Serialization: &streamanalytics.AvroSerializationArgs{
* Type: pulumi.String("Avro"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(ServiceBusQueueOutputDataSourceArgs.builder()
* .propertyColumns(
* "column1",
* "column2")
* .queueName("sdkqueue")
* .serviceBusNamespace("sdktest")
* .sharedAccessPolicyKey("sharedAccessPolicyKey=")
* .sharedAccessPolicyName("RootManageSharedAccessKey")
* .systemPropertyColumns(Map.of(
* "MessageId", "col3",
* "PartitionKey", "col4"))
* .type("Microsoft.ServiceBus/Queue")
* .build())
* .jobName("sj5095")
* .outputName("output3456")
* .resourceGroupName("sjrg3410")
* .serialization(AvroSerializationArgs.builder()
* .type("Avro")
* .build())
* .build());
* }
* }
* ```
* ### Create a Service Bus Topic output with CSV serialization
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.ServiceBusTopicOutputDataSourceArgs
* {
* PropertyColumns = new[]
* {
* "column1",
* "column2",
* },
* ServiceBusNamespace = "sdktest",
* SharedAccessPolicyKey = "sharedAccessPolicyKey=",
* SharedAccessPolicyName = "RootManageSharedAccessKey",
* TopicName = "sdktopic",
* Type = "Microsoft.ServiceBus/Topic",
* },
* JobName = "sj7094",
* OutputName = "output7886",
* ResourceGroupName = "sjrg6450",
* Serialization = new AzureNative.StreamAnalytics.Inputs.CsvSerializationArgs
* {
* Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
* FieldDelimiter = ",",
* Type = "Csv",
* },
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.ServiceBusTopicOutputDataSourceArgs{
* PropertyColumns: pulumi.StringArray{
* pulumi.String("column1"),
* pulumi.String("column2"),
* },
* ServiceBusNamespace: pulumi.String("sdktest"),
* SharedAccessPolicyKey: pulumi.String("sharedAccessPolicyKey="),
* SharedAccessPolicyName: pulumi.String("RootManageSharedAccessKey"),
* TopicName: pulumi.String("sdktopic"),
* Type: pulumi.String("Microsoft.ServiceBus/Topic"),
* },
* JobName: pulumi.String("sj7094"),
* OutputName: pulumi.String("output7886"),
* ResourceGroupName: pulumi.String("sjrg6450"),
* Serialization: &streamanalytics.CsvSerializationArgs{
* Encoding: pulumi.String(streamanalytics.EncodingUTF8),
* FieldDelimiter: pulumi.String(","),
* Type: pulumi.String("Csv"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(ServiceBusTopicOutputDataSourceArgs.builder()
* .propertyColumns(
* "column1",
* "column2")
* .serviceBusNamespace("sdktest")
* .sharedAccessPolicyKey("sharedAccessPolicyKey=")
* .sharedAccessPolicyName("RootManageSharedAccessKey")
* .topicName("sdktopic")
* .type("Microsoft.ServiceBus/Topic")
* .build())
* .jobName("sj7094")
* .outputName("output7886")
* .resourceGroupName("sjrg6450")
* .serialization(CsvSerializationArgs.builder()
* .encoding("UTF8")
* .fieldDelimiter(",")
* .type("Csv")
* .build())
* .build());
* }
* }
* ```
* ### Create a blob output with CSV serialization
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.BlobOutputDataSourceArgs
* {
* Container = "state",
* DateFormat = "yyyy/MM/dd",
* PathPattern = "{date}/{time}",
* StorageAccounts = new[]
* {
* new AzureNative.StreamAnalytics.Inputs.StorageAccountArgs
* {
* AccountKey = "accountKey==",
* AccountName = "someAccountName",
* },
* },
* TimeFormat = "HH",
* Type = "Microsoft.Storage/Blob",
* },
* JobName = "sj900",
* OutputName = "output1623",
* ResourceGroupName = "sjrg5023",
* Serialization = new AzureNative.StreamAnalytics.Inputs.CsvSerializationArgs
* {
* Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
* FieldDelimiter = ",",
* Type = "Csv",
* },
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.BlobOutputDataSourceArgs{
* Container: pulumi.String("state"),
* DateFormat: pulumi.String("yyyy/MM/dd"),
* PathPattern: pulumi.String("{date}/{time}"),
* StorageAccounts: streamanalytics.StorageAccountArray{
* &streamanalytics.StorageAccountArgs{
* AccountKey: pulumi.String("accountKey=="),
* AccountName: pulumi.String("someAccountName"),
* },
* },
* TimeFormat: pulumi.String("HH"),
* Type: pulumi.String("Microsoft.Storage/Blob"),
* },
* JobName: pulumi.String("sj900"),
* OutputName: pulumi.String("output1623"),
* ResourceGroupName: pulumi.String("sjrg5023"),
* Serialization: &streamanalytics.CsvSerializationArgs{
* Encoding: pulumi.String(streamanalytics.EncodingUTF8),
* FieldDelimiter: pulumi.String(","),
* Type: pulumi.String("Csv"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(BlobOutputDataSourceArgs.builder()
* .container("state")
* .dateFormat("yyyy/MM/dd")
* .pathPattern("{date}/{time}")
* .storageAccounts(StorageAccountArgs.builder()
* .accountKey("accountKey==")
* .accountName("someAccountName")
* .build())
* .timeFormat("HH")
* .type("Microsoft.Storage/Blob")
* .build())
* .jobName("sj900")
* .outputName("output1623")
* .resourceGroupName("sjrg5023")
* .serialization(CsvSerializationArgs.builder()
* .encoding("UTF8")
* .fieldDelimiter(",")
* .type("Csv")
* .build())
* .build());
* }
* }
* ```
* ### Create an Azure Data Lake Store output with JSON serialization
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.AzureDataLakeStoreOutputDataSourceArgs
* {
* AccountName = "someaccount",
* DateFormat = "yyyy/MM/dd",
* FilePathPrefix = "{date}/{time}",
* RefreshToken = "someRefreshToken==",
* TenantId = "cea4e98b-c798-49e7-8c40-4a2b3beb47dd",
* TimeFormat = "HH",
* TokenUserDisplayName = "Bob Smith",
* TokenUserPrincipalName = "[email protected] ",
* Type = "Microsoft.DataLake/Accounts",
* },
* JobName = "sj3310",
* OutputName = "output5195",
* ResourceGroupName = "sjrg6912",
* Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
* {
* Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
* Format = AzureNative.StreamAnalytics.JsonOutputSerializationFormat.Array,
* Type = "Json",
* },
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.AzureDataLakeStoreOutputDataSourceArgs{
* AccountName: pulumi.String("someaccount"),
* DateFormat: pulumi.String("yyyy/MM/dd"),
* FilePathPrefix: pulumi.String("{date}/{time}"),
* RefreshToken: pulumi.String("someRefreshToken=="),
* TenantId: pulumi.String("cea4e98b-c798-49e7-8c40-4a2b3beb47dd"),
* TimeFormat: pulumi.String("HH"),
* TokenUserDisplayName: pulumi.String("Bob Smith"),
* TokenUserPrincipalName: pulumi.String("[email protected] "),
* Type: pulumi.String("Microsoft.DataLake/Accounts"),
* },
* JobName: pulumi.String("sj3310"),
* OutputName: pulumi.String("output5195"),
* ResourceGroupName: pulumi.String("sjrg6912"),
* Serialization: &streamanalytics.JsonSerializationArgs{
* Encoding: pulumi.String(streamanalytics.EncodingUTF8),
* Format: pulumi.String(streamanalytics.JsonOutputSerializationFormatArray),
* Type: pulumi.String("Json"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(AzureDataLakeStoreOutputDataSourceArgs.builder()
* .accountName("someaccount")
* .dateFormat("yyyy/MM/dd")
* .filePathPrefix("{date}/{time}")
* .refreshToken("someRefreshToken==")
* .tenantId("cea4e98b-c798-49e7-8c40-4a2b3beb47dd")
* .timeFormat("HH")
* .tokenUserDisplayName("Bob Smith")
* .tokenUserPrincipalName("[email protected] ")
* .type("Microsoft.DataLake/Accounts")
* .build())
* .jobName("sj3310")
* .outputName("output5195")
* .resourceGroupName("sjrg6912")
* .serialization(JsonSerializationArgs.builder()
* .encoding("UTF8")
* .format("Array")
* .type("Json")
* .build())
* .build());
* }
* }
* ```
* ### Create an Azure Data Warehouse output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSynapseOutputDataSourceArgs
* {
* Database = "zhayaSQLpool",
* Password = "password123",
* Server = "asatestserver",
* Table = "test2",
* Type = "Microsoft.Sql/Server/DataWarehouse",
* User = "tolladmin",
* },
* JobName = "sjName",
* OutputName = "dwOutput",
* ResourceGroupName = "sjrg",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.AzureSynapseOutputDataSourceArgs{
* Database: pulumi.String("zhayaSQLpool"),
* Password: pulumi.String("password123"),
* Server: pulumi.String("asatestserver"),
* Table: pulumi.String("test2"),
* Type: pulumi.String("Microsoft.Sql/Server/DataWarehouse"),
* User: pulumi.String("tolladmin"),
* },
* JobName: pulumi.String("sjName"),
* OutputName: pulumi.String("dwOutput"),
* ResourceGroupName: pulumi.String("sjrg"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(AzureSynapseOutputDataSourceArgs.builder()
* .database("zhayaSQLpool")
* .password("password123")
* .server("asatestserver")
* .table("test2")
* .type("Microsoft.Sql/Server/DataWarehouse")
* .user("tolladmin")
* .build())
* .jobName("sjName")
* .outputName("dwOutput")
* .resourceGroupName("sjrg")
* .build());
* }
* }
* ```
* ### Create an Azure Function output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.AzureFunctionOutputDataSourceArgs
* {
* FunctionAppName = "functionappforasaautomation",
* FunctionName = "HttpTrigger2",
* MaxBatchCount = 100,
* MaxBatchSize = 256,
* Type = "Microsoft.AzureFunction",
* },
* JobName = "sjName",
* OutputName = "azureFunction1",
* ResourceGroupName = "sjrg",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.AzureFunctionOutputDataSourceArgs{
* FunctionAppName: pulumi.String("functionappforasaautomation"),
* FunctionName: pulumi.String("HttpTrigger2"),
* MaxBatchCount: pulumi.Float64(100),
* MaxBatchSize: pulumi.Float64(256),
* Type: pulumi.String("Microsoft.AzureFunction"),
* },
* JobName: pulumi.String("sjName"),
* OutputName: pulumi.String("azureFunction1"),
* ResourceGroupName: pulumi.String("sjrg"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(AzureFunctionOutputDataSourceArgs.builder()
* .functionAppName("functionappforasaautomation")
* .functionName("HttpTrigger2")
* .maxBatchCount(100)
* .maxBatchSize(256)
* .type("Microsoft.AzureFunction")
* .build())
* .jobName("sjName")
* .outputName("azureFunction1")
* .resourceGroupName("sjrg")
* .build());
* }
* }
* ```
* ### Create an Azure SQL database output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSqlDatabaseOutputDataSourceArgs
* {
* Database = "someDatabase",
* Password = "somePassword",
* Server = "someServer",
* Table = "someTable",
* Type = "Microsoft.Sql/Server/Database",
* User = "",
* },
* JobName = "sj6458",
* OutputName = "output1755",
* ResourceGroupName = "sjrg2157",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.AzureSqlDatabaseOutputDataSourceArgs{
* Database: pulumi.String("someDatabase"),
* Password: pulumi.String("somePassword"),
* Server: pulumi.String("someServer"),
* Table: pulumi.String("someTable"),
* Type: pulumi.String("Microsoft.Sql/Server/Database"),
* User: pulumi.String(""),
* },
* JobName: pulumi.String("sj6458"),
* OutputName: pulumi.String("output1755"),
* ResourceGroupName: pulumi.String("sjrg2157"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(AzureSqlDatabaseOutputDataSourceArgs.builder()
* .database("someDatabase")
* .password("somePassword")
* .server("someServer")
* .table("someTable")
* .type("Microsoft.Sql/Server/Database")
* .user("")
* .build())
* .jobName("sj6458")
* .outputName("output1755")
* .resourceGroupName("sjrg2157")
* .build());
* }
* }
* ```
* ### Create an Azure Table output
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.AzureTableOutputDataSourceArgs
* {
* AccountKey = "accountKey==",
* AccountName = "someAccountName",
* BatchSize = 25,
* ColumnsToRemove = new[]
* {
* "column1",
* "column2",
* },
* PartitionKey = "partitionKey",
* RowKey = "rowKey",
* Table = "samples",
* Type = "Microsoft.Storage/Table",
* },
* JobName = "sj2790",
* OutputName = "output958",
* ResourceGroupName = "sjrg5176",
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.AzureTableOutputDataSourceArgs{
* AccountKey: pulumi.String("accountKey=="),
* AccountName: pulumi.String("someAccountName"),
* BatchSize: pulumi.Int(25),
* ColumnsToRemove: pulumi.StringArray{
* pulumi.String("column1"),
* pulumi.String("column2"),
* },
* PartitionKey: pulumi.String("partitionKey"),
* RowKey: pulumi.String("rowKey"),
* Table: pulumi.String("samples"),
* Type: pulumi.String("Microsoft.Storage/Table"),
* },
* JobName: pulumi.String("sj2790"),
* OutputName: pulumi.String("output958"),
* ResourceGroupName: pulumi.String("sjrg5176"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(AzureTableOutputDataSourceArgs.builder()
* .accountKey("accountKey==")
* .accountName("someAccountName")
* .batchSize(25)
* .columnsToRemove(
* "column1",
* "column2")
* .partitionKey("partitionKey")
* .rowKey("rowKey")
* .table("samples")
* .type("Microsoft.Storage/Table")
* .build())
* .jobName("sj2790")
* .outputName("output958")
* .resourceGroupName("sjrg5176")
* .build());
* }
* }
* ```
* ### Create an Event Hub output with JSON serialization
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using AzureNative = Pulumi.AzureNative;
* return await Deployment.RunAsync(() =>
* {
* var output = new AzureNative.StreamAnalytics.Output("output", new()
* {
* Datasource = new AzureNative.StreamAnalytics.Inputs.EventHubOutputDataSourceArgs
* {
* EventHubName = "sdkeventhub",
* PartitionKey = "partitionKey",
* ServiceBusNamespace = "sdktest",
* SharedAccessPolicyKey = "sharedAccessPolicyKey=",
* SharedAccessPolicyName = "RootManageSharedAccessKey",
* Type = "Microsoft.ServiceBus/EventHub",
* },
* JobName = "sj3310",
* OutputName = "output5195",
* ResourceGroupName = "sjrg6912",
* Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
* {
* Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
* Format = AzureNative.StreamAnalytics.JsonOutputSerializationFormat.Array,
* Type = "Json",
* },
* });
* });
* ```
* ```go
* package main
* import (
* streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
* Datasource: &streamanalytics.EventHubOutputDataSourceArgs{
* EventHubName: pulumi.String("sdkeventhub"),
* PartitionKey: pulumi.String("partitionKey"),
* ServiceBusNamespace: pulumi.String("sdktest"),
* SharedAccessPolicyKey: pulumi.String("sharedAccessPolicyKey="),
* SharedAccessPolicyName: pulumi.String("RootManageSharedAccessKey"),
* Type: pulumi.String("Microsoft.ServiceBus/EventHub"),
* },
* JobName: pulumi.String("sj3310"),
* OutputName: pulumi.String("output5195"),
* ResourceGroupName: pulumi.String("sjrg6912"),
* Serialization: &streamanalytics.JsonSerializationArgs{
* Encoding: pulumi.String(streamanalytics.EncodingUTF8),
* Format: pulumi.String(streamanalytics.JsonOutputSerializationFormatArray),
* Type: pulumi.String("Json"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azurenative.streamanalytics.Output;
* import com.pulumi.azurenative.streamanalytics.OutputArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var output = new Output("output", OutputArgs.builder()
* .datasource(EventHubOutputDataSourceArgs.builder()
* .eventHubName("sdkeventhub")
* .partitionKey("partitionKey")
* .serviceBusNamespace("sdktest")
* .sharedAccessPolicyKey("sharedAccessPolicyKey=")
* .sharedAccessPolicyName("RootManageSharedAccessKey")
* .type("Microsoft.ServiceBus/EventHub")
* .build())
* .jobName("sj3310")
* .outputName("output5195")
* .resourceGroupName("sjrg6912")
* .serialization(JsonSerializationArgs.builder()
* .encoding("UTF8")
* .format("Array")
* .type("Json")
* .build())
* .build());
* }
* }
* ```
* ## Import
* An existing resource can be imported using its type token, name, and identifier, e.g.
* ```sh
* $ pulumi import azure-native:streamanalytics:Output output5195 /subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}
* ```
* @property datasource Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests.
* @property jobName The name of the streaming job.
* @property name Resource name
* @property outputName The name of the output.
* @property resourceGroupName The name of the resource group. The name is case insensitive.
* @property serialization Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
* @property sizeWindow The size window to constrain a Stream Analytics output to.
* @property timeWindow The time frame for filtering Stream Analytics job outputs.
*/
public data class OutputArgs(
public val datasource: Output<Any>? = null,
public val jobName: Output<String>? = null,
public val name: Output<String>? = null,
public val outputName: Output<String>? = null,
public val resourceGroupName: Output<String>? = null,
public val serialization: Output<Any>? = null,
public val sizeWindow: Output<Int>? = null,
public val timeWindow: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.azurenative.streamanalytics.OutputArgs> {
override fun toJava(): com.pulumi.azurenative.streamanalytics.OutputArgs =
com.pulumi.azurenative.streamanalytics.OutputArgs.builder()
.datasource(datasource?.applyValue({ args0 -> args0 }))
.jobName(jobName?.applyValue({ args0 -> args0 }))
.name(name?.applyValue({ args0 -> args0 }))
.outputName(outputName?.applyValue({ args0 -> args0 }))
.resourceGroupName(resourceGroupName?.applyValue({ args0 -> args0 }))
.serialization(serialization?.applyValue({ args0 -> args0 }))
.sizeWindow(sizeWindow?.applyValue({ args0 -> args0 }))
.timeWindow(timeWindow?.applyValue({ args0 -> args0 })).build()
}
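/*
 * Usage sketch (illustrative, not part of the generated API): [OutputArgs] can be constructed directly by
 * wrapping plain values with Output.of; toJava() then forwards each field to the Java builder shown above.
 * The datasource and serialization fields are typed as Any and are expected to hold one of the Java
 * *OutputDataSourceArgs / serialization input types shown in the examples.
 * ```kotlin
 * val args = OutputArgs(
 *     jobName = Output.of("sj2331"),
 *     outputName = Output.of("output3022"),
 *     resourceGroupName = Output.of("sjrg7983"),
 * )
 * val javaArgs: com.pulumi.azurenative.streamanalytics.OutputArgs = args.toJava()
 * ```
 */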
/**
* Builder for [OutputArgs].
*/
@PulumiTagMarker
public class OutputArgsBuilder internal constructor() {
private var datasource: Output<Any>? = null
private var jobName: Output<String>? = null
private var name: Output<String>? = null
private var outputName: Output<String>? = null
private var resourceGroupName: Output<String>? = null
private var serialization: Output<Any>? = null
private var sizeWindow: Output<Int>? = null
private var timeWindow: Output<String>? = null
/**
* @param value Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests.
*/
@JvmName("dgfthvqtnynbaagg")
public suspend fun datasource(`value`: Output<Any>) {
this.datasource = value
}
/**
* @param value The name of the streaming job.
*/
@JvmName("vcqskfeclbbuxqpk")
public suspend fun jobName(`value`: Output<String>) {
this.jobName = value
}
/**
* @param value Resource name
*/
@JvmName("dmcyfvfsuwfulsol")
public suspend fun name(`value`: Output<String>) {
this.name = value
}
/**
* @param value The name of the output.
*/
@JvmName("oimcucmhogrspury")
public suspend fun outputName(`value`: Output<String>) {
this.outputName = value
}
/**
* @param value The name of the resource group. The name is case insensitive.
*/
@JvmName("xntnuoaornyoiqke")
public suspend fun resourceGroupName(`value`: Output<String>) {
this.resourceGroupName = value
}
/**
* @param value Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
*/
@JvmName("brmyaotmlleigcar")
public suspend fun serialization(`value`: Output<Any>) {
this.serialization = value
}
/**
* @param value The size window to constrain a Stream Analytics output to.
*/
@JvmName("wcrucqwwphweknoc")
public suspend fun sizeWindow(`value`: Output<Int>) {
this.sizeWindow = value
}
/**
* @param value The time frame for filtering Stream Analytics job outputs.
*/
@JvmName("nnrujcdubwgkpoio")
public suspend fun timeWindow(`value`: Output<String>) {
this.timeWindow = value
}
/**
* @param value Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests.
*/
@JvmName("cwxvaryalrfbwecf")
public suspend fun datasource(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.datasource = mapped
}
/**
* @param value The name of the streaming job.
*/
@JvmName("mhglgusvlusonufe")
public suspend fun jobName(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.jobName = mapped
}
/**
* @param value Resource name
*/
@JvmName("ixxkvddackdlklsf")
public suspend fun name(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.name = mapped
}
/**
* @param value The name of the output.
*/
@JvmName("ptwxwpijxbikmcpi")
public suspend fun outputName(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.outputName = mapped
}
/**
* @param value The name of the resource group. The name is case insensitive.
*/
@JvmName("iuefxfpagqdqctpn")
public suspend fun resourceGroupName(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.resourceGroupName = mapped
}
/**
* @param value Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.
*/
@JvmName("vwrqkkkdxugtiqtv")
public suspend fun serialization(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.serialization = mapped
}
/**
* @param value The size window to constrain a Stream Analytics output to.
*/
@JvmName("jmrcewxuwxogiyvb")
public suspend fun sizeWindow(`value`: Int?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.sizeWindow = mapped
}
/**
* @param value The time frame for filtering Stream Analytics job outputs.
*/
@JvmName("jqetpndllajlbalv")
public suspend fun timeWindow(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.timeWindow = mapped
}
internal fun build(): OutputArgs = OutputArgs(
datasource = datasource,
jobName = jobName,
name = name,
outputName = outputName,
resourceGroupName = resourceGroupName,
serialization = serialization,
sizeWindow = sizeWindow,
timeWindow = timeWindow,
)
}
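/*
 * Builder flow sketch (illustrative, assuming same-module access): the generated type-safe resource
 * functions drive this builder roughly as below. OutputArgsBuilder() and build() are internal, so
 * application code normally reaches them through the generated `args { ... }` blocks rather than
 * calling them directly.
 * ```kotlin
 * suspend fun exampleArgs(): OutputArgs {
 *     val builder = OutputArgsBuilder()
 *     builder.jobName("sj2331")            // plain values are wrapped with Output.of(...)
 *     builder.outputName("output3022")
 *     builder.resourceGroupName("sjrg7983")
 *     return builder.build()
 * }
 * ```
 */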