com.pulumi.aws.datasync.LocationHdfs Maven / Gradle / Ivy

A Pulumi package for creating and managing Amazon Web Services (AWS) cloud resources.
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.aws.datasync;
import com.pulumi.aws.Utilities;
import com.pulumi.aws.datasync.LocationHdfsArgs;
import com.pulumi.aws.datasync.inputs.LocationHdfsState;
import com.pulumi.aws.datasync.outputs.LocationHdfsNameNode;
import com.pulumi.aws.datasync.outputs.LocationHdfsQopConfiguration;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Export;
import com.pulumi.core.annotations.ResourceType;
import com.pulumi.core.internal.Codegen;
import java.lang.Integer;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
/**
* Manages an HDFS Location within AWS DataSync.
*
* > **NOTE:** The DataSync Agents must be available before creating this resource.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.aws.datasync.LocationHdfs;
* import com.pulumi.aws.datasync.LocationHdfsArgs;
* import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *
 *     public static void stack(Context ctx) {
 *         var example = new LocationHdfs("example", LocationHdfsArgs.builder()
 *             .agentArns(exampleAwsDatasyncAgent.arn())
 *             .authenticationType("SIMPLE")
 *             .simpleUser("example")
 *             .nameNodes(LocationHdfsNameNodeArgs.builder()
 *                 .hostname(exampleAwsInstance.privateDns())
 *                 .port(80)
 *                 .build())
 *             .build());
 *     }
 * }
* }
*
* <!--End PulumiCodeChooser -->
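 *
 * The examples on this page reference a pre-existing DataSync agent as
 * {@code exampleAwsDatasyncAgent}. For reference, a minimal agent sketch follows; the
 * activation-key flow and the placeholder value are assumptions for illustration, not
 * part of this resource (see the {@code aws.datasync.Agent} documentation for the full
 * workflow):
 *
 * <!--Start PulumiCodeChooser -->
 *
 * {@code
 * import com.pulumi.aws.datasync.Agent;
 * import com.pulumi.aws.datasync.AgentArgs;
 *
 * var exampleAwsDatasyncAgent = new Agent("example", AgentArgs.builder()
 *     .name("example-agent")
 *     // Hypothetical placeholder: the activation key comes from the deployed agent appliance.
 *     .activationKey("AAAAA-BBBBB-CCCCC-DDDDD-EEEEE")
 *     .build());
 * }
 *
 * <!--End PulumiCodeChooser -->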
*
* ### Kerberos Authentication
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.aws.datasync.LocationHdfs;
* import com.pulumi.aws.datasync.LocationHdfsArgs;
 * import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
 * import com.pulumi.std.StdFunctions;
 * import com.pulumi.std.inputs.FileArgs;
 * import com.pulumi.std.inputs.Filebase64Args;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *
 *     public static void stack(Context ctx) {
 *         var example = new LocationHdfs("example", LocationHdfsArgs.builder()
 *             .agentArns(exampleAwsDatasyncAgent.arn())
 *             .authenticationType("KERBEROS")
 *             .nameNodes(LocationHdfsNameNodeArgs.builder()
 *                 .hostname(exampleAwsInstance.privateDns())
 *                 .port(80)
 *                 .build())
 *             .kerberosPrincipal("user@example.com")
 *             .kerberosKeytabBase64(StdFunctions.filebase64(Filebase64Args.builder()
 *                 .input("user.keytab")
 *                 .build()).result())
 *             .kerberosKrb5Conf(StdFunctions.file(FileArgs.builder()
 *                 .input("krb5.conf")
 *                 .build()).result())
 *             .build());
 *     }
 * }
 * }
*
* <!--End PulumiCodeChooser -->
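 *
 * ### Quality of Protection (QOP) Configuration
 *
 * A minimal sketch of setting `qop_configuration` follows. It assumes the
 * {@code LocationHdfsQopConfigurationArgs} builder exposes {@code rpcProtection} and
 * {@code dataTransferProtection} for the documented `rpc_protection` and
 * `data_transfer_protection` settings; verify the names against your provider version:
 *
 * <!--Start PulumiCodeChooser -->
 *
 * {@code
 * import com.pulumi.aws.datasync.inputs.LocationHdfsQopConfigurationArgs;
 *
 * var secured = new LocationHdfs("secured", LocationHdfsArgs.builder()
 *     .agentArns(exampleAwsDatasyncAgent.arn())
 *     .authenticationType("KERBEROS")
 *     .kerberosPrincipal("user@example.com")
 *     .nameNodes(LocationHdfsNameNodeArgs.builder()
 *         .hostname(exampleAwsInstance.privateDns())
 *         .port(80)
 *         .build())
 *     .qopConfiguration(LocationHdfsQopConfigurationArgs.builder()
 *         .rpcProtection("PRIVACY")
 *         .dataTransferProtection("PRIVACY")
 *         .build())
 *     .build());
 * }
 *
 * <!--End PulumiCodeChooser -->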
*
* ## Import
*
* Using `pulumi import`, import `aws_datasync_location_hdfs` using the Amazon Resource Name (ARN). For example:
*
* ```sh
* $ pulumi import aws:datasync/locationHdfs:LocationHdfs example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
* ```
*
*/
@ResourceType(type="aws:datasync/locationHdfs:LocationHdfs")
public class LocationHdfs extends com.pulumi.resources.CustomResource {
    /**
     * A list of DataSync Agent ARNs with which this location will be associated.
     *
     */
    @Export(name="agentArns", refs={List.class,String.class}, tree="[0,1]")
    private Output<List<String>> agentArns;

    /**
     * @return A list of DataSync Agent ARNs with which this location will be associated.
     *
     */
    public Output<List<String>> agentArns() {
        return this.agentArns;
    }

    /**
     * Amazon Resource Name (ARN) of the DataSync Location.
     *
     */
    @Export(name="arn", refs={String.class}, tree="[0]")
    private Output<String> arn;

    /**
     * @return Amazon Resource Name (ARN) of the DataSync Location.
     *
     */
    public Output<String> arn() {
        return this.arn;
    }

    /**
     * The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
     *
     */
    @Export(name="authenticationType", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> authenticationType;

    /**
     * @return The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
     *
     */
    public Output<Optional<String>> authenticationType() {
        return Codegen.optional(this.authenticationType);
    }

    /**
     * The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
     *
     */
    @Export(name="blockSize", refs={Integer.class}, tree="[0]")
    private Output</* @Nullable */ Integer> blockSize;

    /**
     * @return The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
     *
     */
    public Output<Optional<Integer>> blockSize() {
        return Codegen.optional(this.blockSize);
    }

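    // Editorial worked example (not generated code): blockSize is expressed in bytes.
    // The 128 MiB default corresponds to 128 * 1024 * 1024 = 134217728 bytes, which
    // satisfies the multiple-of-512 constraint (134217728 / 512 = 262144), e.g.:
    //
    //     LocationHdfsArgs.builder().blockSize(134217728)
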
    /**
     * The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
     *
     */
    @Export(name="kerberosKeytab", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kerberosKeytab;

    /**
     * @return The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
     *
     */
    public Output<Optional<String>> kerberosKeytab() {
        return Codegen.optional(this.kerberosKeytab);
    }

    /**
     * Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
     *
     */
    @Export(name="kerberosKeytabBase64", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kerberosKeytabBase64;

    /**
     * @return Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
     *
     */
    public Output<Optional<String>> kerberosKeytabBase64() {
        return Codegen.optional(this.kerberosKeytabBase64);
    }

    /**
     * The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
     *
     */
    @Export(name="kerberosKrb5Conf", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kerberosKrb5Conf;

    /**
     * @return The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
     *
     */
    public Output<Optional<String>> kerberosKrb5Conf() {
        return Codegen.optional(this.kerberosKrb5Conf);
    }

    /**
     * Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
     *
     */
    @Export(name="kerberosKrb5ConfBase64", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kerberosKrb5ConfBase64;

    /**
     * @return Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
     *
     */
    public Output<Optional<String>> kerberosKrb5ConfBase64() {
        return Codegen.optional(this.kerberosKrb5ConfBase64);
    }

    /**
     * The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
     *
     */
    @Export(name="kerberosPrincipal", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kerberosPrincipal;

    /**
     * @return The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
     *
     */
    public Output<Optional<String>> kerberosPrincipal() {
        return Codegen.optional(this.kerberosPrincipal);
    }

    /**
     * The URI of the HDFS cluster's Key Management Server (KMS).
     *
     */
    @Export(name="kmsKeyProviderUri", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> kmsKeyProviderUri;

    /**
     * @return The URI of the HDFS cluster's Key Management Server (KMS).
     *
     */
    public Output<Optional<String>> kmsKeyProviderUri() {
        return Codegen.optional(this.kmsKeyProviderUri);
    }

    /**
     * The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     *
     */
    @Export(name="nameNodes", refs={List.class,LocationHdfsNameNode.class}, tree="[0,1]")
    private Output<List<LocationHdfsNameNode>> nameNodes;

    /**
     * @return The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
     *
     */
    public Output<List<LocationHdfsNameNode>> nameNodes() {
        return this.nameNodes;
    }

    /**
     * The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set `RpcProtection` or `DataTransferProtection`, the other parameter assumes the same value. See configuration below.
     *
     */
    @Export(name="qopConfiguration", refs={LocationHdfsQopConfiguration.class}, tree="[0]")
    private Output<LocationHdfsQopConfiguration> qopConfiguration;

    /**
     * @return The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set `RpcProtection` or `DataTransferProtection`, the other parameter assumes the same value. See configuration below.
     *
     */
    public Output<LocationHdfsQopConfiguration> qopConfiguration() {
        return this.qopConfiguration;
    }

    /**
     * The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
     *
     */
    @Export(name="replicationFactor", refs={Integer.class}, tree="[0]")
    private Output</* @Nullable */ Integer> replicationFactor;

    /**
     * @return The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
     *
     */
    public Output<Optional<Integer>> replicationFactor() {
        return Codegen.optional(this.replicationFactor);
    }

    /**
     * The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
     *
     */
    @Export(name="simpleUser", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> simpleUser;

    /**
     * @return The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
     *
     */
    public Output<Optional<String>> simpleUser() {
        return Codegen.optional(this.simpleUser);
    }

    /**
     * A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
     *
     */
    @Export(name="subdirectory", refs={String.class}, tree="[0]")
    private Output</* @Nullable */ String> subdirectory;

    /**
     * @return A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
     *
     */
    public Output<Optional<String>> subdirectory() {
        return Codegen.optional(this.subdirectory);
    }

    /**
     * Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     *
     */
    @Export(name="tags", refs={Map.class,String.class}, tree="[0,1,1]")
    private Output</* @Nullable */ Map<String,String>> tags;

    /**
     * @return Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     *
     */
    public Output<Optional<Map<String,String>>> tags() {
        return Codegen.optional(this.tags);
    }

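    // Editorial note (not generated code): tags are plain key/value pairs; with
    // java.util.Map already imported above, a minimal sketch in LocationHdfsArgs is:
    //
    //     .tags(Map.of("Name", "example-hdfs-location"))
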
    /**
     * A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
     *
     * @deprecated
     * Please use `tags` instead.
     *
     */
    @Deprecated /* Please use `tags` instead. */
    @Export(name="tagsAll", refs={Map.class,String.class}, tree="[0,1,1]")
    private Output<Map<String,String>> tagsAll;

    /**
     * @return A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
     *
     * @deprecated
     * Please use `tags` instead.
     *
     */
    @Deprecated /* Please use `tags` instead. */
    public Output<Map<String,String>> tagsAll() {
        return this.tagsAll;
    }
}