
com.pulumi.aws.datasync.kotlin.LocationHdfsArgs.kt


Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.aws.datasync.kotlin

import com.pulumi.aws.datasync.LocationHdfsArgs.builder
import com.pulumi.aws.datasync.kotlin.inputs.LocationHdfsNameNodeArgs
import com.pulumi.aws.datasync.kotlin.inputs.LocationHdfsNameNodeArgsBuilder
import com.pulumi.aws.datasync.kotlin.inputs.LocationHdfsQopConfigurationArgs
import com.pulumi.aws.datasync.kotlin.inputs.LocationHdfsQopConfigurationArgsBuilder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Manages an HDFS Location within AWS DataSync.
 * > **NOTE:** The DataSync Agents must be available before creating this resource.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 * const example = new aws.datasync.LocationHdfs("example", {
 *     agentArns: [exampleAwsDatasyncAgent.arn],
 *     authenticationType: "SIMPLE",
 *     simpleUser: "example",
 *     nameNodes: [{
 *         hostname: exampleAwsInstance.privateDns,
 *         port: 80,
 *     }],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_aws as aws
 * example = aws.datasync.LocationHdfs("example",
 *     agent_arns=[example_aws_datasync_agent["arn"]],
 *     authentication_type="SIMPLE",
 *     simple_user="example",
 *     name_nodes=[{
 *         "hostname": example_aws_instance["privateDns"],
 *         "port": 80,
 *     }])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Aws = Pulumi.Aws;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Aws.DataSync.LocationHdfs("example", new()
 *     {
 *         AgentArns = new[]
 *         {
 *             exampleAwsDatasyncAgent.Arn,
 *         },
 *         AuthenticationType = "SIMPLE",
 *         SimpleUser = "example",
 *         NameNodes = new[]
 *         {
 *             new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
 *             {
 *                 Hostname = exampleAwsInstance.PrivateDns,
 *                 Port = 80,
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
 * 			AgentArns: pulumi.StringArray{
 * 				exampleAwsDatasyncAgent.Arn,
 * 			},
 * 			AuthenticationType: pulumi.String("SIMPLE"),
 * 			SimpleUser:         pulumi.String("example"),
 * 			NameNodes: datasync.LocationHdfsNameNodeArray{
 * 				&datasync.LocationHdfsNameNodeArgs{
 * 					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
 * 					Port:     pulumi.Int(80),
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.datasync.LocationHdfs;
 * import com.pulumi.aws.datasync.LocationHdfsArgs;
 * import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new LocationHdfs("example", LocationHdfsArgs.builder()
 *             .agentArns(exampleAwsDatasyncAgent.arn())
 *             .authenticationType("SIMPLE")
 *             .simpleUser("example")
 *             .nameNodes(LocationHdfsNameNodeArgs.builder()
 *                 .hostname(exampleAwsInstance.privateDns())
 *                 .port(80)
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: aws:datasync:LocationHdfs
 *     properties:
 *       agentArns:
 *         - ${exampleAwsDatasyncAgent.arn}
 *       authenticationType: SIMPLE
 *       simpleUser: example
 *       nameNodes:
 *         - hostname: ${exampleAwsInstance.privateDns}
 *           port: 80
 * ```
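 *
 * The same configuration in this Kotlin SDK, as a minimal sketch: the top-level
 * `locationHdfs` resource-builder function is assumed to be generated alongside
 * these args, and `exampleAgent`/`exampleInstance` are placeholders for resources
 * declared elsewhere in the program.
 * ```kotlin
 * import com.pulumi.kotlin.Pulumi
 *
 * fun main() {
 *     Pulumi.run { ctx ->
 *         val example = locationHdfs("example") {
 *             args {
 *                 agentArns(exampleAgent.arn)
 *                 authenticationType("SIMPLE")
 *                 simpleUser("example")
 *                 nameNodes {
 *                     hostname(exampleInstance.privateDns)
 *                     port(80)
 *                 }
 *             }
 *         }
 *     }
 * }
 * ```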
 * 
 * ### Kerberos Authentication
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 * import * as std from "@pulumi/std";
 * const example = new aws.datasync.LocationHdfs("example", {
 *     agentArns: [exampleAwsDatasyncAgent.arn],
 *     authenticationType: "KERBEROS",
 *     nameNodes: [{
 *         hostname: exampleAwsInstance.privateDns,
 *         port: 80,
 *     }],
 *     kerberosPrincipal: "user@example.com",
 *     kerberosKeytabBase64: std.filebase64({
 *         input: "user.keytab",
 *     }).then(invoke => invoke.result),
 *     kerberosKrb5Conf: std.file({
 *         input: "krb5.conf",
 *     }).then(invoke => invoke.result),
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_aws as aws
 * import pulumi_std as std
 * example = aws.datasync.LocationHdfs("example",
 *     agent_arns=[example_aws_datasync_agent["arn"]],
 *     authentication_type="KERBEROS",
 *     name_nodes=[{
 *         "hostname": example_aws_instance["privateDns"],
 *         "port": 80,
 *     }],
 *     kerberos_principal="user@example.com",
 *     kerberos_keytab_base64=std.filebase64(input="user.keytab").result,
 *     kerberos_krb5_conf=std.file(input="krb5.conf").result)
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Aws = Pulumi.Aws;
 * using Std = Pulumi.Std;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Aws.DataSync.LocationHdfs("example", new()
 *     {
 *         AgentArns = new[]
 *         {
 *             exampleAwsDatasyncAgent.Arn,
 *         },
 *         AuthenticationType = "KERBEROS",
 *         NameNodes = new[]
 *         {
 *             new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
 *             {
 *                 Hostname = exampleAwsInstance.PrivateDns,
 *                 Port = 80,
 *             },
 *         },
 *         KerberosPrincipal = "user@example.com",
 *         KerberosKeytabBase64 = Std.Filebase64.Invoke(new()
 *         {
 *             Input = "user.keytab",
 *         }).Apply(invoke => invoke.Result),
 *         KerberosKrb5Conf = Std.File.Invoke(new()
 *         {
 *             Input = "krb5.conf",
 *         }).Apply(invoke => invoke.Result),
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
 * 	"github.com/pulumi/pulumi-std/sdk/go/std"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		invokeFilebase64, err := std.Filebase64(ctx, &std.Filebase64Args{
 * 			Input: "user.keytab",
 * 		}, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		invokeFile1, err := std.File(ctx, &std.FileArgs{
 * 			Input: "krb5.conf",
 * 		}, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
 * 			AgentArns: pulumi.StringArray{
 * 				exampleAwsDatasyncAgent.Arn,
 * 			},
 * 			AuthenticationType: pulumi.String("KERBEROS"),
 * 			NameNodes: datasync.LocationHdfsNameNodeArray{
 * 				&datasync.LocationHdfsNameNodeArgs{
 * 					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
 * 					Port:     pulumi.Int(80),
 * 				},
 * 			},
 * 			KerberosPrincipal:    pulumi.String("user@example.com"),
 * 			KerberosKeytabBase64: pulumi.String(invokeFilebase64.Result),
 * 			KerberosKrb5Conf:     pulumi.String(invokeFile1.Result),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.datasync.LocationHdfs;
 * import com.pulumi.aws.datasync.LocationHdfsArgs;
 * import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
 * import com.pulumi.std.StdFunctions;
 * import com.pulumi.std.inputs.Filebase64Args;
 * import com.pulumi.std.inputs.FileArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new LocationHdfs("example", LocationHdfsArgs.builder()
 *             .agentArns(exampleAwsDatasyncAgent.arn())
 *             .authenticationType("KERBEROS")
 *             .nameNodes(LocationHdfsNameNodeArgs.builder()
 *                 .hostname(exampleAwsInstance.privateDns())
 *                 .port(80)
 *                 .build())
 *             .kerberosPrincipal("user@example.com")
 *             .kerberosKeytabBase64(StdFunctions.filebase64(Filebase64Args.builder()
 *                 .input("user.keytab")
 *                 .build()).result())
 *             .kerberosKrb5Conf(StdFunctions.file(FileArgs.builder()
 *                 .input("krb5.conf")
 *                 .build()).result())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: aws:datasync:LocationHdfs
 *     properties:
 *       agentArns:
 *         - ${exampleAwsDatasyncAgent.arn}
 *       authenticationType: KERBEROS
 *       nameNodes:
 *         - hostname: ${exampleAwsInstance.privateDns}
 *           port: 80
 *       kerberosPrincipal: user@example.com
 *       kerberosKeytabBase64:
 *         fn::invoke:
 *           Function: std:filebase64
 *           Arguments:
 *             input: user.keytab
 *           Return: result
 *       kerberosKrb5Conf:
 *         fn::invoke:
 *           Function: std:file
 *           Arguments:
 *             input: krb5.conf
 *           Return: result
 * ```
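 *
 * A Kotlin sketch of the Kerberos variant; plain JDK file reading stands in for
 * the `std` invokes used above (the `locationHdfs` builder function and the
 * `exampleAgent`/`exampleInstance` resources are assumed as in the earlier sketch).
 * ```kotlin
 * import com.pulumi.kotlin.Pulumi
 * import java.nio.file.Files
 * import java.nio.file.Paths
 * import java.util.Base64
 *
 * fun main() {
 *     Pulumi.run { ctx ->
 *         // The keytab is binary, so base64-encode it; krb5.conf is plain text.
 *         val keytabBase64 = Base64.getEncoder()
 *             .encodeToString(Files.readAllBytes(Paths.get("user.keytab")))
 *         val krb5Conf = Files.readString(Paths.get("krb5.conf"))
 *         val example = locationHdfs("example") {
 *             args {
 *                 agentArns(exampleAgent.arn)
 *                 authenticationType("KERBEROS")
 *                 nameNodes {
 *                     hostname(exampleInstance.privateDns)
 *                     port(80)
 *                 }
 *                 kerberosPrincipal("user@example.com")
 *                 kerberosKeytabBase64(keytabBase64)
 *                 kerberosKrb5Conf(krb5Conf)
 *             }
 *         }
 *     }
 * }
 * ```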
 * 
 * ## Import
 * Using `pulumi import`, import `aws_datasync_location_hdfs` using the Amazon Resource Name (ARN). For example:
 * ```sh
 * $ pulumi import aws:datasync/locationHdfs:LocationHdfs example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
 * ```
 * @property agentArns A list of DataSync Agent ARNs with which this location will be associated.
 * @property authenticationType The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
 * @property blockSize The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
 * @property kerberosKeytab The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
 * @property kerberosKeytabBase64 Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
 * @property kerberosKrb5Conf The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
 * @property kerberosKrb5ConfBase64 Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
 * @property kerberosPrincipal The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
 * @property kmsKeyProviderUri The URI of the HDFS cluster's Key Management Server (KMS).
 * @property nameNodes The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
 * @property qopConfiguration The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set one of `rpc_protection` or `data_transfer_protection`, the other assumes the same value. See configuration below.
 * @property replicationFactor The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
 * @property simpleUser The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
 * @property subdirectory A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
 * @property tags Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
 */
public data class LocationHdfsArgs(
    public val agentArns: Output<List<String>>? = null,
    public val authenticationType: Output<String>? = null,
    public val blockSize: Output<Int>? = null,
    public val kerberosKeytab: Output<String>? = null,
    public val kerberosKeytabBase64: Output<String>? = null,
    public val kerberosKrb5Conf: Output<String>? = null,
    public val kerberosKrb5ConfBase64: Output<String>? = null,
    public val kerberosPrincipal: Output<String>? = null,
    public val kmsKeyProviderUri: Output<String>? = null,
    public val nameNodes: Output<List<LocationHdfsNameNodeArgs>>? = null,
    public val qopConfiguration: Output<LocationHdfsQopConfigurationArgs>? = null,
    public val replicationFactor: Output<Int>? = null,
    public val simpleUser: Output<String>? = null,
    public val subdirectory: Output<String>? = null,
    public val tags: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.aws.datasync.LocationHdfsArgs> {
    override fun toJava(): com.pulumi.aws.datasync.LocationHdfsArgs =
        com.pulumi.aws.datasync.LocationHdfsArgs.builder()
            .agentArns(agentArns?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .authenticationType(authenticationType?.applyValue({ args0 -> args0 }))
            .blockSize(blockSize?.applyValue({ args0 -> args0 }))
            .kerberosKeytab(kerberosKeytab?.applyValue({ args0 -> args0 }))
            .kerberosKeytabBase64(kerberosKeytabBase64?.applyValue({ args0 -> args0 }))
            .kerberosKrb5Conf(kerberosKrb5Conf?.applyValue({ args0 -> args0 }))
            .kerberosKrb5ConfBase64(kerberosKrb5ConfBase64?.applyValue({ args0 -> args0 }))
            .kerberosPrincipal(kerberosPrincipal?.applyValue({ args0 -> args0 }))
            .kmsKeyProviderUri(kmsKeyProviderUri?.applyValue({ args0 -> args0 }))
            .nameNodes(
                nameNodes?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            )
            .qopConfiguration(qopConfiguration?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .replicationFactor(replicationFactor?.applyValue({ args0 -> args0 }))
            .simpleUser(simpleUser?.applyValue({ args0 -> args0 }))
            .subdirectory(subdirectory?.applyValue({ args0 -> args0 }))
            .tags(
                tags?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}

/**
 * Builder for [LocationHdfsArgs].
 */
@PulumiTagMarker
public class LocationHdfsArgsBuilder internal constructor() {
    private var agentArns: Output<List<String>>? = null

    private var authenticationType: Output<String>? = null

    private var blockSize: Output<Int>? = null

    private var kerberosKeytab: Output<String>? = null

    private var kerberosKeytabBase64: Output<String>? = null

    private var kerberosKrb5Conf: Output<String>? = null

    private var kerberosKrb5ConfBase64: Output<String>? = null

    private var kerberosPrincipal: Output<String>? = null

    private var kmsKeyProviderUri: Output<String>? = null

    private var nameNodes: Output<List<LocationHdfsNameNodeArgs>>? = null

    private var qopConfiguration: Output<LocationHdfsQopConfigurationArgs>? = null

    private var replicationFactor: Output<Int>? = null

    private var simpleUser: Output<String>? = null

    private var subdirectory: Output<String>? = null

    private var tags: Output<Map<String, String>>? = null

    /**
     * @param value A list of DataSync Agent ARNs with which this location will be associated.
     */
    @JvmName("bimbiuortlfupckx")
    public suspend fun agentArns(`value`: Output<List<String>>) {
        this.agentArns = value
    }

    @JvmName("fvgbqojiuvtfwbyy")
    public suspend fun agentArns(vararg values: Output<String>) {
        this.agentArns = Output.all(values.asList())
    }

    /**
     * @param values A list of DataSync Agent ARNs with which this location will be associated.
     */
    @JvmName("sreeqnovxuckvong")
    public suspend fun agentArns(values: List<Output<String>>) {
        this.agentArns = Output.all(values)
    }

    /**
     * @param value The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
     */
    @JvmName("klqwpndeidkibbdr")
    public suspend fun authenticationType(`value`: Output<String>) {
        this.authenticationType = value
    }

    /**
     * @param value The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
     */
    @JvmName("obxnbqkswewgrxey")
    public suspend fun blockSize(`value`: Output<Int>) {
        this.blockSize = value
    }

    /**
     * @param value The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
     */
    @JvmName("tgrfuprhfeuepjum")
    public suspend fun kerberosKeytab(`value`: Output<String>) {
        this.kerberosKeytab = value
    }

    /**
     * @param value Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
     */
    @JvmName("kpwquweuprhsbhkw")
    public suspend fun kerberosKeytabBase64(`value`: Output<String>) {
        this.kerberosKeytabBase64 = value
    }

    /**
     * @param value The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
     */
    @JvmName("feiuhouagvalvyeq")
    public suspend fun kerberosKrb5Conf(`value`: Output<String>) {
        this.kerberosKrb5Conf = value
    }

    /**
     * @param value Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
     */
    @JvmName("tkmqdwhdssscdxsm")
    public suspend fun kerberosKrb5ConfBase64(`value`: Output<String>) {
        this.kerberosKrb5ConfBase64 = value
    }

    /**
     * @param value The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
     */
    @JvmName("ajjakbuexpwvkhge")
    public suspend fun kerberosPrincipal(`value`: Output<String>) {
        this.kerberosPrincipal = value
    }

    /**
     * @param value The URI of the HDFS cluster's Key Management Server (KMS).
     */
    @JvmName("djaspypbinvfiupi")
    public suspend fun kmsKeyProviderUri(`value`: Output<String>) {
        this.kmsKeyProviderUri = value
    }

    /**
     * @param value The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("cctsxqfsyfmfsryf")
    public suspend fun nameNodes(`value`: Output<List<LocationHdfsNameNodeArgs>>) {
        this.nameNodes = value
    }

    @JvmName("nfaeumkhlohftykv")
    public suspend fun nameNodes(vararg values: Output<LocationHdfsNameNodeArgs>) {
        this.nameNodes = Output.all(values.asList())
    }

    /**
     * @param values The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("awbquybmbpdvrwqe")
    public suspend fun nameNodes(values: List<Output<LocationHdfsNameNodeArgs>>) {
        this.nameNodes = Output.all(values)
    }

    /**
     * @param value The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set one of `rpc_protection` or `data_transfer_protection`, the other assumes the same value. See configuration below.
     */
    @JvmName("mjslmwdvhohpijfq")
    public suspend fun qopConfiguration(`value`: Output<LocationHdfsQopConfigurationArgs>) {
        this.qopConfiguration = value
    }

    /**
     * @param value The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
     */
    @JvmName("yaaemeuqiprehygr")
    public suspend fun replicationFactor(`value`: Output<Int>) {
        this.replicationFactor = value
    }

    /**
     * @param value The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
     */
    @JvmName("njeeolcydjycnjmm")
    public suspend fun simpleUser(`value`: Output<String>) {
        this.simpleUser = value
    }

    /**
     * @param value A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
     */
    @JvmName("nvnnjlonqcwiurhv")
    public suspend fun subdirectory(`value`: Output<String>) {
        this.subdirectory = value
    }

    /**
     * @param value Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     */
    @JvmName("qlvoscdliadgwknf")
    public suspend fun tags(`value`: Output<Map<String, String>>) {
        this.tags = value
    }

    /**
     * @param value A list of DataSync Agent ARNs with which this location will be associated.
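     *
     * For example, with an illustrative (made-up) agent ARN:
     * `agentArns(listOf("arn:aws:datasync:us-east-1:123456789012:agent/agent-0b00000000000000a"))`.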
     */
    @JvmName("eccdwakvxxdjvole")
    public suspend fun agentArns(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.agentArns = mapped
    }

    /**
     * @param values A list of DataSync Agent ARNs with which this location will be associated.
     */
    @JvmName("mbipsjyfoihfespb")
    public suspend fun agentArns(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.agentArns = mapped
    }

    /**
     * @param value The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
     */
    @JvmName("sjitdpywqwttimyp")
    public suspend fun authenticationType(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.authenticationType = mapped
    }

    /**
     * @param value The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
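     *
     * For example, the 128 MiB default written out explicitly: `blockSize(134217728)`
     * (128 * 1024 * 1024 bytes, a multiple of 512).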
     */
    @JvmName("qwgthvvbrarieoml")
    public suspend fun blockSize(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.blockSize = mapped
    }

    /**
     * @param value The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
     */
    @JvmName("cdidbfpfpepnhbrj")
    public suspend fun kerberosKeytab(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kerberosKeytab = mapped
    }

    /**
     * @param value Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
     */
    @JvmName("hbtgbwnpngdorecq")
    public suspend fun kerberosKeytabBase64(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kerberosKeytabBase64 = mapped
    }

    /**
     * @param value The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
     */
    @JvmName("xddgsikecjgfmgno")
    public suspend fun kerberosKrb5Conf(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kerberosKrb5Conf = mapped
    }

    /**
     * @param value Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
     */
    @JvmName("jtpnfnxseruqfyel")
    public suspend fun kerberosKrb5ConfBase64(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kerberosKrb5ConfBase64 = mapped
    }

    /**
     * @param value The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
     */
    @JvmName("eneigxxjltyqnxlu")
    public suspend fun kerberosPrincipal(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kerberosPrincipal = mapped
    }

    /**
     * @param value The URI of the HDFS cluster's Key Management Server (KMS).
     */
    @JvmName("gsoxvacmdqhxfdrr")
    public suspend fun kmsKeyProviderUri(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.kmsKeyProviderUri = mapped
    }

    /**
     * @param value The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("fjbkdxaiplsrjiok")
    public suspend fun nameNodes(`value`: List<LocationHdfsNameNodeArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.nameNodes = mapped
    }

    /**
     * @param argument The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("itaoqdmqnajhevnn")
    public suspend fun nameNodes(argument: List<suspend LocationHdfsNameNodeArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            LocationHdfsNameNodeArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.nameNodes = mapped
    }

    /**
     * @param argument The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("ptvilataqfpgyweh")
    public suspend fun nameNodes(vararg argument: suspend LocationHdfsNameNodeArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            LocationHdfsNameNodeArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.nameNodes = mapped
    }

    /**
     * @param argument The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
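     *
     * A minimal sketch of this overload (hostname and port are hypothetical
     * placeholders for your cluster's NameNode endpoint):
     * ```kotlin
     * nameNodes {
     *     hostname("namenode.internal.example")
     *     port(8020)
     * }
     * ```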
     */
    @JvmName("yrnactexjnyqgllp")
    public suspend fun nameNodes(argument: suspend LocationHdfsNameNodeArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(LocationHdfsNameNodeArgsBuilder().applySuspend { argument() }.build())
        val mapped = of(toBeMapped)
        this.nameNodes = mapped
    }

    /**
     * @param values The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     */
    @JvmName("ctxiqnytukvojpnn")
    public suspend fun nameNodes(vararg values: LocationHdfsNameNodeArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.nameNodes = mapped
    }

    /**
     * @param value The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set one of `rpc_protection` or `data_transfer_protection`, the other assumes the same value. See configuration below.
     */
    @JvmName("cefkascdkdturxfv")
    public suspend fun qopConfiguration(`value`: LocationHdfsQopConfigurationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.qopConfiguration = mapped
    }

    /**
     * @param argument The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set one of `rpc_protection` or `data_transfer_protection`, the other assumes the same value. See configuration below.
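     *
     * A sketch of the nested builder, assuming setter names matching the schema's
     * `rpc_protection` and `data_transfer_protection` fields:
     * ```kotlin
     * qopConfiguration {
     *     rpcProtection("PRIVACY")
     *     dataTransferProtection("PRIVACY")
     * }
     * ```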
     */
    @JvmName("ybjqukumhedbpptl")
    public suspend fun qopConfiguration(argument: suspend LocationHdfsQopConfigurationArgsBuilder.() -> Unit) {
        val toBeMapped = LocationHdfsQopConfigurationArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.qopConfiguration = mapped
    }

    /**
     * @param value The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
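     *
     * For example, the default replication: `replicationFactor(3)`.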
     */
    @JvmName("sbuinpgqsotbcgdy")
    public suspend fun replicationFactor(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.replicationFactor = mapped
    }

    /**
     * @param value The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
     */
    @JvmName("qdyrqrjgewkusvis")
    public suspend fun simpleUser(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.simpleUser = mapped
    }

    /**
     * @param value A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
     */
    @JvmName("vvbrxqjqdushmiuv")
    public suspend fun subdirectory(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.subdirectory = mapped
    }

    /**
     * @param value Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     */
    @JvmName("qyepulsfscaxjssj")
    public suspend fun tags(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.tags = mapped
    }

    /**
     * @param values Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
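     *
     * For example: `tags("Name" to "example", "Environment" to "dev")`.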
     */
    @JvmName("dbxikabmdvcsjjlg")
    public fun tags(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.tags = mapped
    }

    internal fun build(): LocationHdfsArgs = LocationHdfsArgs(
        agentArns = agentArns,
        authenticationType = authenticationType,
        blockSize = blockSize,
        kerberosKeytab = kerberosKeytab,
        kerberosKeytabBase64 = kerberosKeytabBase64,
        kerberosKrb5Conf = kerberosKrb5Conf,
        kerberosKrb5ConfBase64 = kerberosKrb5ConfBase64,
        kerberosPrincipal = kerberosPrincipal,
        kmsKeyProviderUri = kmsKeyProviderUri,
        nameNodes = nameNodes,
        qopConfiguration = qopConfiguration,
        replicationFactor = replicationFactor,
        simpleUser = simpleUser,
        subdirectory = subdirectory,
        tags = tags,
    )
}



