com.pulumi.azure.streamanalytics.kotlin.OutputBlobArgs.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.streamanalytics.kotlin

import com.pulumi.azure.streamanalytics.OutputBlobArgs.builder
import com.pulumi.azure.streamanalytics.kotlin.inputs.OutputBlobSerializationArgs
import com.pulumi.azure.streamanalytics.kotlin.inputs.OutputBlobSerializationArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.jvm.JvmName

/**
 * Manages a Stream Analytics Output to Blob Storage.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as azure from "@pulumi/azure";
 * const exampleResourceGroup = new azure.core.ResourceGroup("example", {
 *     name: "rg-example",
 *     location: "West Europe",
 * });
 * const example = azure.streamanalytics.getJobOutput({
 *     name: "example-job",
 *     resourceGroupName: exampleResourceGroup.name,
 * });
 * const exampleAccount = new azure.storage.Account("example", {
 *     name: "examplesa",
 *     resourceGroupName: exampleResourceGroup.name,
 *     location: exampleResourceGroup.location,
 *     accountTier: "Standard",
 *     accountReplicationType: "LRS",
 * });
 * const exampleContainer = new azure.storage.Container("example", {
 *     name: "example",
 *     storageAccountName: exampleAccount.name,
 *     containerAccessType: "private",
 * });
 * const exampleOutputBlob = new azure.streamanalytics.OutputBlob("example", {
 *     name: "output-to-blob-storage",
 *     streamAnalyticsJobName: example.apply(example => example.name),
 *     resourceGroupName: example.apply(example => example.resourceGroupName),
 *     storageAccountName: exampleAccount.name,
 *     storageAccountKey: exampleAccount.primaryAccessKey,
 *     storageContainerName: exampleContainer.name,
 *     pathPattern: "some-pattern",
 *     dateFormat: "yyyy-MM-dd",
 *     timeFormat: "HH",
 *     serialization: {
 *         type: "Csv",
 *         encoding: "UTF8",
 *         fieldDelimiter: ",",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_azure as azure
 * example_resource_group = azure.core.ResourceGroup("example",
 *     name="rg-example",
 *     location="West Europe")
 * example = azure.streamanalytics.get_job_output(name="example-job",
 *     resource_group_name=example_resource_group.name)
 * example_account = azure.storage.Account("example",
 *     name="examplesa",
 *     resource_group_name=example_resource_group.name,
 *     location=example_resource_group.location,
 *     account_tier="Standard",
 *     account_replication_type="LRS")
 * example_container = azure.storage.Container("example",
 *     name="example",
 *     storage_account_name=example_account.name,
 *     container_access_type="private")
 * example_output_blob = azure.streamanalytics.OutputBlob("example",
 *     name="output-to-blob-storage",
 *     stream_analytics_job_name=example.name,
 *     resource_group_name=example.resource_group_name,
 *     storage_account_name=example_account.name,
 *     storage_account_key=example_account.primary_access_key,
 *     storage_container_name=example_container.name,
 *     path_pattern="some-pattern",
 *     date_format="yyyy-MM-dd",
 *     time_format="HH",
 *     serialization=azure.streamanalytics.OutputBlobSerializationArgs(
 *         type="Csv",
 *         encoding="UTF8",
 *         field_delimiter=",",
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Azure = Pulumi.Azure;
 * return await Deployment.RunAsync(() =>
 * {
 *     var exampleResourceGroup = new Azure.Core.ResourceGroup("example", new()
 *     {
 *         Name = "rg-example",
 *         Location = "West Europe",
 *     });
 *     var example = Azure.StreamAnalytics.GetJob.Invoke(new()
 *     {
 *         Name = "example-job",
 *         ResourceGroupName = exampleResourceGroup.Name,
 *     });
 *     var exampleAccount = new Azure.Storage.Account("example", new()
 *     {
 *         Name = "examplesa",
 *         ResourceGroupName = exampleResourceGroup.Name,
 *         Location = exampleResourceGroup.Location,
 *         AccountTier = "Standard",
 *         AccountReplicationType = "LRS",
 *     });
 *     var exampleContainer = new Azure.Storage.Container("example", new()
 *     {
 *         Name = "example",
 *         StorageAccountName = exampleAccount.Name,
 *         ContainerAccessType = "private",
 *     });
 *     var exampleOutputBlob = new Azure.StreamAnalytics.OutputBlob("example", new()
 *     {
 *         Name = "output-to-blob-storage",
 *         StreamAnalyticsJobName = example.Apply(getJobResult => getJobResult.Name),
 *         ResourceGroupName = example.Apply(getJobResult => getJobResult.ResourceGroupName),
 *         StorageAccountName = exampleAccount.Name,
 *         StorageAccountKey = exampleAccount.PrimaryAccessKey,
 *         StorageContainerName = exampleContainer.Name,
 *         PathPattern = "some-pattern",
 *         DateFormat = "yyyy-MM-dd",
 *         TimeFormat = "HH",
 *         Serialization = new Azure.StreamAnalytics.Inputs.OutputBlobSerializationArgs
 *         {
 *             Type = "Csv",
 *             Encoding = "UTF8",
 *             FieldDelimiter = ",",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
 * 	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/streamanalytics"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		exampleResourceGroup, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
 * 			Name:     pulumi.String("rg-example"),
 * 			Location: pulumi.String("West Europe"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		example := streamanalytics.LookupJobOutput(ctx, streamanalytics.GetJobOutputArgs{
 * 			Name:              pulumi.String("example-job"),
 * 			ResourceGroupName: exampleResourceGroup.Name,
 * 		}, nil)
 * 		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
 * 			Name:                   pulumi.String("examplesa"),
 * 			ResourceGroupName:      exampleResourceGroup.Name,
 * 			Location:               exampleResourceGroup.Location,
 * 			AccountTier:            pulumi.String("Standard"),
 * 			AccountReplicationType: pulumi.String("LRS"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		exampleContainer, err := storage.NewContainer(ctx, "example", &storage.ContainerArgs{
 * 			Name:                pulumi.String("example"),
 * 			StorageAccountName:  exampleAccount.Name,
 * 			ContainerAccessType: pulumi.String("private"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = streamanalytics.NewOutputBlob(ctx, "example", &streamanalytics.OutputBlobArgs{
 * 			Name: pulumi.String("output-to-blob-storage"),
 * 			StreamAnalyticsJobName: example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
 * 				return &example.Name, nil
 * 			}).(pulumi.StringPtrOutput),
 * 			ResourceGroupName: example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
 * 				return &example.ResourceGroupName, nil
 * 			}).(pulumi.StringPtrOutput),
 * 			StorageAccountName:   exampleAccount.Name,
 * 			StorageAccountKey:    exampleAccount.PrimaryAccessKey,
 * 			StorageContainerName: exampleContainer.Name,
 * 			PathPattern:          pulumi.String("some-pattern"),
 * 			DateFormat:           pulumi.String("yyyy-MM-dd"),
 * 			TimeFormat:           pulumi.String("HH"),
 * 			Serialization: &streamanalytics.OutputBlobSerializationArgs{
 * 				Type:           pulumi.String("Csv"),
 * 				Encoding:       pulumi.String("UTF8"),
 * 				FieldDelimiter: pulumi.String(","),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.azure.core.ResourceGroup;
 * import com.pulumi.azure.core.ResourceGroupArgs;
 * import com.pulumi.azure.streamanalytics.StreamanalyticsFunctions;
 * import com.pulumi.azure.streamanalytics.inputs.GetJobArgs;
 * import com.pulumi.azure.storage.Account;
 * import com.pulumi.azure.storage.AccountArgs;
 * import com.pulumi.azure.storage.Container;
 * import com.pulumi.azure.storage.ContainerArgs;
 * import com.pulumi.azure.streamanalytics.OutputBlob;
 * import com.pulumi.azure.streamanalytics.OutputBlobArgs;
 * import com.pulumi.azure.streamanalytics.inputs.OutputBlobSerializationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
 *             .name("rg-example")
 *             .location("West Europe")
 *             .build());
 *         final var example = StreamanalyticsFunctions.getJob(GetJobArgs.builder()
 *             .name("example-job")
 *             .resourceGroupName(exampleResourceGroup.name())
 *             .build());
 *         var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
 *             .name("examplesa")
 *             .resourceGroupName(exampleResourceGroup.name())
 *             .location(exampleResourceGroup.location())
 *             .accountTier("Standard")
 *             .accountReplicationType("LRS")
 *             .build());
 *         var exampleContainer = new Container("exampleContainer", ContainerArgs.builder()
 *             .name("example")
 *             .storageAccountName(exampleAccount.name())
 *             .containerAccessType("private")
 *             .build());
 *         var exampleOutputBlob = new OutputBlob("exampleOutputBlob", OutputBlobArgs.builder()
 *             .name("output-to-blob-storage")
 *             .streamAnalyticsJobName(example.applyValue(getJobResult -> getJobResult.name()))
 *             .resourceGroupName(example.applyValue(getJobResult -> getJobResult.resourceGroupName()))
 *             .storageAccountName(exampleAccount.name())
 *             .storageAccountKey(exampleAccount.primaryAccessKey())
 *             .storageContainerName(exampleContainer.name())
 *             .pathPattern("some-pattern")
 *             .dateFormat("yyyy-MM-dd")
 *             .timeFormat("HH")
 *             .serialization(OutputBlobSerializationArgs.builder()
 *                 .type("Csv")
 *                 .encoding("UTF8")
 *                 .fieldDelimiter(",")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   exampleResourceGroup:
 *     type: azure:core:ResourceGroup
 *     name: example
 *     properties:
 *       name: rg-example
 *       location: West Europe
 *   exampleAccount:
 *     type: azure:storage:Account
 *     name: example
 *     properties:
 *       name: examplesa
 *       resourceGroupName: ${exampleResourceGroup.name}
 *       location: ${exampleResourceGroup.location}
 *       accountTier: Standard
 *       accountReplicationType: LRS
 *   exampleContainer:
 *     type: azure:storage:Container
 *     name: example
 *     properties:
 *       name: example
 *       storageAccountName: ${exampleAccount.name}
 *       containerAccessType: private
 *   exampleOutputBlob:
 *     type: azure:streamanalytics:OutputBlob
 *     name: example
 *     properties:
 *       name: output-to-blob-storage
 *       streamAnalyticsJobName: ${example.name}
 *       resourceGroupName: ${example.resourceGroupName}
 *       storageAccountName: ${exampleAccount.name}
 *       storageAccountKey: ${exampleAccount.primaryAccessKey}
 *       storageContainerName: ${exampleContainer.name}
 *       pathPattern: some-pattern
 *       dateFormat: yyyy-MM-dd
 *       timeFormat: HH
 *       serialization:
 *         type: Csv
 *         encoding: UTF8
 *         fieldDelimiter: ','
 * variables:
 *   example:
 *     fn::invoke:
 *       Function: azure:streamanalytics:getJob
 *       Arguments:
 *         name: example-job
 *         resourceGroupName: ${exampleResourceGroup.name}
 * ```
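 * A Kotlin sketch using this provider's type-safe DSL is shown below. The surrounding
 * scaffolding (`Pulumi.run`, the top-level `outputBlob` resource function, and literal values
 * standing in for the resource group, job, storage account and container created above) is
 * assumed to follow the usual Pulumi Kotlin conventions; the `args` block itself is backed by
 * [OutputBlobArgsBuilder] from this file, and the nested `serialization` block is assumed to
 * expose `type`, `encoding` and `fieldDelimiter` setters matching the other examples.
 * ```kotlin
 * import com.pulumi.azure.streamanalytics.kotlin.outputBlob
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run { ctx ->
 *         // The args { } block delegates to OutputBlobArgsBuilder defined in this file.
 *         val exampleOutputBlob = outputBlob("example") {
 *             args {
 *                 name("output-to-blob-storage")
 *                 streamAnalyticsJobName("example-job")
 *                 resourceGroupName("rg-example")
 *                 storageAccountName("examplesa")
 *                 storageAccountKey("<primary-access-key>") // placeholder for the account's primary key
 *                 storageContainerName("example")
 *                 pathPattern("some-pattern")
 *                 dateFormat("yyyy-MM-dd")
 *                 timeFormat("HH")
 *                 serialization {
 *                     type("Csv")
 *                     encoding("UTF8")
 *                     fieldDelimiter(",")
 *                 }
 *             }
 *         }
 *     }
 * }
 * ```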
 * 
 * ## Import
 * Stream Analytics Outputs to Blob Storage can be imported using the `resource id`, e.g.
 * ```sh
 * $ pulumi import azure:streamanalytics/outputBlob:OutputBlob example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1
 * ```
 * @property authenticationMode The authentication mode for the Stream Output. Possible values are `Msi` and `ConnectionString`. Defaults to `ConnectionString`.
 * @property batchMaxWaitTime The maximum wait time per batch in `hh:mm:ss` e.g. `00:02:00` for two minutes.
 * @property batchMinRows The minimum number of rows per batch (must be between `0` and `1000000`).
 * @property blobWriteMode Determines whether blob blocks are either committed automatically or appended. Possible values are `Append` and `Once`. Defaults to `Append`.
 * @property dateFormat The date format. Wherever `{date}` appears in `path_pattern`, the value of this property is used as the date format instead.
 * @property name The name of the Stream Output. Changing this forces a new resource to be created.
 * @property pathPattern The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
 * @property resourceGroupName The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
 * @property serialization A `serialization` block as defined below.
 * @property storageAccountKey The Access Key which should be used to connect to this Storage Account.
 * @property storageAccountName The name of the Storage Account.
 * @property storageContainerName The name of the Container within the Storage Account.
 * @property streamAnalyticsJobName The name of the Stream Analytics Job. Changing this forces a new resource to be created.
 * @property timeFormat The time format. Wherever `{time}` appears in `path_pattern`, the value of this property is used as the time format instead.
 */
public data class OutputBlobArgs(
    public val authenticationMode: Output<String>? = null,
    public val batchMaxWaitTime: Output<String>? = null,
    public val batchMinRows: Output<Int>? = null,
    public val blobWriteMode: Output<String>? = null,
    public val dateFormat: Output<String>? = null,
    public val name: Output<String>? = null,
    public val pathPattern: Output<String>? = null,
    public val resourceGroupName: Output<String>? = null,
    public val serialization: Output<OutputBlobSerializationArgs>? = null,
    public val storageAccountKey: Output<String>? = null,
    public val storageAccountName: Output<String>? = null,
    public val storageContainerName: Output<String>? = null,
    public val streamAnalyticsJobName: Output<String>? = null,
    public val timeFormat: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.azure.streamanalytics.OutputBlobArgs> {
    override fun toJava(): com.pulumi.azure.streamanalytics.OutputBlobArgs =
        com.pulumi.azure.streamanalytics.OutputBlobArgs.builder()
            .authenticationMode(authenticationMode?.applyValue({ args0 -> args0 }))
            .batchMaxWaitTime(batchMaxWaitTime?.applyValue({ args0 -> args0 }))
            .batchMinRows(batchMinRows?.applyValue({ args0 -> args0 }))
            .blobWriteMode(blobWriteMode?.applyValue({ args0 -> args0 }))
            .dateFormat(dateFormat?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .pathPattern(pathPattern?.applyValue({ args0 -> args0 }))
            .resourceGroupName(resourceGroupName?.applyValue({ args0 -> args0 }))
            .serialization(serialization?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .storageAccountKey(storageAccountKey?.applyValue({ args0 -> args0 }))
            .storageAccountName(storageAccountName?.applyValue({ args0 -> args0 }))
            .storageContainerName(storageContainerName?.applyValue({ args0 -> args0 }))
            .streamAnalyticsJobName(streamAnalyticsJobName?.applyValue({ args0 -> args0 }))
            .timeFormat(timeFormat?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [OutputBlobArgs].
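 *
 * Each property has both a plain-value setter and an [Output]-typed setter, so values flowing
 * from other resources can be passed through without unwrapping. A minimal sketch inside a
 * resource's `args` block (the `exampleAccount` handle is a hypothetical storage Account
 * resource whose properties are `Output<String>` values):
 * ```kotlin
 * args {
 *     storageAccountName(exampleAccount.name)            // Output<String> overload
 *     storageAccountKey(exampleAccount.primaryAccessKey) // Output<String> overload
 *     pathPattern("some-pattern")                        // plain String overload
 *     serialization {                                    // nested builder overload
 *         type("Csv")
 *     }
 * }
 * ```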
 */
@PulumiTagMarker
public class OutputBlobArgsBuilder internal constructor() {
    private var authenticationMode: Output<String>? = null

    private var batchMaxWaitTime: Output<String>? = null

    private var batchMinRows: Output<Int>? = null

    private var blobWriteMode: Output<String>? = null

    private var dateFormat: Output<String>? = null

    private var name: Output<String>? = null

    private var pathPattern: Output<String>? = null

    private var resourceGroupName: Output<String>? = null

    private var serialization: Output<OutputBlobSerializationArgs>? = null

    private var storageAccountKey: Output<String>? = null

    private var storageAccountName: Output<String>? = null

    private var storageContainerName: Output<String>? = null

    private var streamAnalyticsJobName: Output<String>? = null

    private var timeFormat: Output<String>? = null

    /**
     * @param value The authentication mode for the Stream Output. Possible values are `Msi` and `ConnectionString`. Defaults to `ConnectionString`.
     */
    @JvmName("bbekgiousslrlmie")
    public suspend fun authenticationMode(`value`: Output<String>) {
        this.authenticationMode = value
    }

    /**
     * @param value The maximum wait time per batch in `hh:mm:ss` e.g. `00:02:00` for two minutes.
     */
    @JvmName("ptrscpfikamxgxbv")
    public suspend fun batchMaxWaitTime(`value`: Output<String>) {
        this.batchMaxWaitTime = value
    }

    /**
     * @param value The minimum number of rows per batch (must be between `0` and `1000000`).
     */
    @JvmName("ggupukvogbgpsrqj")
    public suspend fun batchMinRows(`value`: Output<Int>) {
        this.batchMinRows = value
    }

    /**
     * @param value Determines whether blob blocks are either committed automatically or appended. Possible values are `Append` and `Once`. Defaults to `Append`.
     */
    @JvmName("bfwtlcohdvarmcng")
    public suspend fun blobWriteMode(`value`: Output<String>) {
        this.blobWriteMode = value
    }

    /**
     * @param value The date format. Wherever `{date}` appears in `path_pattern`, the value of this property is used as the date format instead.
     */
    @JvmName("xktdtyvilggsgqcv")
    public suspend fun dateFormat(`value`: Output<String>) {
        this.dateFormat = value
    }

    /**
     * @param value The name of the Stream Output. Changing this forces a new resource to be created.
     */
    @JvmName("ajwtebhhcxrwuilt")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
     */
    @JvmName("gohmrcbmcyaxtdea")
    public suspend fun pathPattern(`value`: Output<String>) {
        this.pathPattern = value
    }

    /**
     * @param value The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
     */
    @JvmName("frfxkfgkhpwwuvbx")
    public suspend fun resourceGroupName(`value`: Output<String>) {
        this.resourceGroupName = value
    }

    /**
     * @param value A `serialization` block as defined below.
     */
    @JvmName("ibjmvxooernjdhir")
    public suspend fun serialization(`value`: Output<OutputBlobSerializationArgs>) {
        this.serialization = value
    }

    /**
     * @param value The Access Key which should be used to connect to this Storage Account.
     */
    @JvmName("qqmlcttlrbluacei")
    public suspend fun storageAccountKey(`value`: Output<String>) {
        this.storageAccountKey = value
    }

    /**
     * @param value The name of the Storage Account.
     */
    @JvmName("dnasndwgavajjdok")
    public suspend fun storageAccountName(`value`: Output<String>) {
        this.storageAccountName = value
    }

    /**
     * @param value The name of the Container within the Storage Account.
     */
    @JvmName("kijhtcuuhvmfxnhi")
    public suspend fun storageContainerName(`value`: Output<String>) {
        this.storageContainerName = value
    }

    /**
     * @param value The name of the Stream Analytics Job. Changing this forces a new resource to be created.
     */
    @JvmName("vylxmoclgjgcmlwa")
    public suspend fun streamAnalyticsJobName(`value`: Output<String>) {
        this.streamAnalyticsJobName = value
    }

    /**
     * @param value The time format. Wherever `{time}` appears in `path_pattern`, the value of this property is used as the time format instead.
     */
    @JvmName("gmbmgpywellbtvpw")
    public suspend fun timeFormat(`value`: Output<String>) {
        this.timeFormat = value
    }

    /**
     * @param value The authentication mode for the Stream Output. Possible values are `Msi` and `ConnectionString`. Defaults to `ConnectionString`.
     */
    @JvmName("gpcwmrwblwlspdtx")
    public suspend fun authenticationMode(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.authenticationMode = mapped
    }

    /**
     * @param value The maximum wait time per batch in `hh:mm:ss` e.g. `00:02:00` for two minutes.
     */
    @JvmName("oovmntmiakfywvnl")
    public suspend fun batchMaxWaitTime(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.batchMaxWaitTime = mapped
    }

    /**
     * @param value The minimum number of rows per batch (must be between `0` and `1000000`).
     */
    @JvmName("hplcxqjgpubdfoqh")
    public suspend fun batchMinRows(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.batchMinRows = mapped
    }

    /**
     * @param value Determines whether blob blocks are either committed automatically or appended. Possible values are `Append` and `Once`. Defaults to `Append`.
     */
    @JvmName("hyexixwvqmhxpapp")
    public suspend fun blobWriteMode(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.blobWriteMode = mapped
    }

    /**
     * @param value The date format. Wherever `{date}` appears in `path_pattern`, the value of this property is used as the date format instead.
     */
    @JvmName("vdubummpmwpkxhil")
    public suspend fun dateFormat(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dateFormat = mapped
    }

    /**
     * @param value The name of the Stream Output. Changing this forces a new resource to be created.
     */
    @JvmName("ssldebhxyhwmispn")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
     */
    @JvmName("jhtshoqwensbhagx")
    public suspend fun pathPattern(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.pathPattern = mapped
    }

    /**
     * @param value The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
     */
    @JvmName("bwuuekhvauenthcq")
    public suspend fun resourceGroupName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.resourceGroupName = mapped
    }

    /**
     * @param value A `serialization` block as defined below.
     */
    @JvmName("fqryttmcdmrpvadn")
    public suspend fun serialization(`value`: OutputBlobSerializationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.serialization = mapped
    }

    /**
     * @param argument A `serialization` block as defined below.
     */
    @JvmName("rcmylxvmeytmiyan")
    public suspend
    fun serialization(argument: suspend OutputBlobSerializationArgsBuilder.() -> Unit) {
        val toBeMapped = OutputBlobSerializationArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.serialization = mapped
    }

    /**
     * @param value The Access Key which should be used to connect to this Storage Account.
     */
    @JvmName("bhutqlholxifabcr")
    public suspend fun storageAccountKey(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.storageAccountKey = mapped
    }

    /**
     * @param value The name of the Storage Account.
     */
    @JvmName("fpsrubtdmbbebgdd")
    public suspend fun storageAccountName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.storageAccountName = mapped
    }

    /**
     * @param value The name of the Container within the Storage Account.
     */
    @JvmName("ocmtryhmlukehwaa")
    public suspend fun storageContainerName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.storageContainerName = mapped
    }

    /**
     * @param value The name of the Stream Analytics Job. Changing this forces a new resource to be created.
     */
    @JvmName("toswdlrachbtydha")
    public suspend fun streamAnalyticsJobName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.streamAnalyticsJobName = mapped
    }

    /**
     * @param value The time format. Wherever `{time}` appears in `path_pattern`, the value of this property is used as the time format instead.
     */
    @JvmName("cjigqrbscrpwmhht")
    public suspend fun timeFormat(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.timeFormat = mapped
    }

    internal fun build(): OutputBlobArgs = OutputBlobArgs(
        authenticationMode = authenticationMode,
        batchMaxWaitTime = batchMaxWaitTime,
        batchMinRows = batchMinRows,
        blobWriteMode = blobWriteMode,
        dateFormat = dateFormat,
        name = name,
        pathPattern = pathPattern,
        resourceGroupName = resourceGroupName,
        serialization = serialization,
        storageAccountKey = storageAccountKey,
        storageAccountName = storageAccountName,
        storageContainerName = storageContainerName,
        streamAnalyticsJobName = streamAnalyticsJobName,
        timeFormat = timeFormat,
    )
}



