com.pulumi.azure.hdinsight.inputs.SparkClusterMetastoresHiveArgs Maven / Gradle / Ivy
A Pulumi package for creating and managing Microsoft Azure cloud resources, based on the Terraform azurerm provider. We recommend using the [Azure Native provider](https://github.com/pulumi/pulumi-azure-native) to provision Azure infrastructure. Azure Native provides complete coverage of Azure resources and same-day access to new resources and resource updates.
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.azure.hdinsight.inputs;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.String;
import java.util.Objects;
public final class SparkClusterMetastoresHiveArgs extends com.pulumi.resources.ResourceArgs {
public static final SparkClusterMetastoresHiveArgs Empty = new SparkClusterMetastoresHiveArgs();
/**
* The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
*
*/
@Import(name="databaseName", required=true)
private Output<String> databaseName;
/**
* @return The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
*
*/
public Output<String> databaseName() {
return this.databaseName;
}
/**
* The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
*
*/
@Import(name="password", required=true)
private Output<String> password;
/**
* @return The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
*
*/
public Output<String> password() {
return this.password;
}
/**
* The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
*
*/
@Import(name="server", required=true)
private Output<String> server;
/**
* @return The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
*
*/
public Output<String> server() {
return this.server;
}
/**
* The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
*
*/
@Import(name="username", required=true)
private Output<String> username;
/**
* @return The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
*
*/
public Output<String> username() {
return this.username;
}
private SparkClusterMetastoresHiveArgs() {}
private SparkClusterMetastoresHiveArgs(SparkClusterMetastoresHiveArgs $) {
this.databaseName = $.databaseName;
this.password = $.password;
this.server = $.server;
this.username = $.username;
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(SparkClusterMetastoresHiveArgs defaults) {
return new Builder(defaults);
}
public static final class Builder {
private SparkClusterMetastoresHiveArgs $;
public Builder() {
$ = new SparkClusterMetastoresHiveArgs();
}
public Builder(SparkClusterMetastoresHiveArgs defaults) {
$ = new SparkClusterMetastoresHiveArgs(Objects.requireNonNull(defaults));
}
/**
* @param databaseName The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder databaseName(Output<String> databaseName) {
$.databaseName = databaseName;
return this;
}
/**
* @param databaseName The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder databaseName(String databaseName) {
return databaseName(Output.of(databaseName));
}
/**
* @param password The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder password(Output<String> password) {
$.password = password;
return this;
}
/**
* @param password The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder password(String password) {
return password(Output.of(password));
}
/**
* @param server The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder server(Output<String> server) {
$.server = server;
return this;
}
/**
* @param server The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder server(String server) {
return server(Output.of(server));
}
/**
* @param username The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder username(Output<String> username) {
$.username = username;
return this;
}
/**
* @param username The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
*
* @return builder
*
*/
public Builder username(String username) {
return username(Output.of(username));
}
public SparkClusterMetastoresHiveArgs build() {
if ($.databaseName == null) {
throw new MissingRequiredPropertyException("SparkClusterMetastoresHiveArgs", "databaseName");
}
if ($.password == null) {
throw new MissingRequiredPropertyException("SparkClusterMetastoresHiveArgs", "password");
}
if ($.server == null) {
throw new MissingRequiredPropertyException("SparkClusterMetastoresHiveArgs", "server");
}
if ($.username == null) {
throw new MissingRequiredPropertyException("SparkClusterMetastoresHiveArgs", "username");
}
return $;
}
}
}
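For context, a minimal usage sketch (not part of the generated file): all four properties are required, and build() throws MissingRequiredPropertyException when one is left unset. The server, database, username, and password values below are hypothetical placeholders, and SparkClusterMetastoresArgs is assumed to be the sibling type generated in the same package for nesting this Hive block under a cluster's metastores configuration.
package example;
import com.pulumi.core.Output;
import com.pulumi.azure.hdinsight.inputs.SparkClusterMetastoresArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterMetastoresHiveArgs;
public final class HiveMetastoreExample {
    public static SparkClusterMetastoresArgs metastores() {
        // Build the Hive metastore arguments; the plain-String overloads
        // wrap each value in Output.of(...) internally.
        SparkClusterMetastoresHiveArgs hive = SparkClusterMetastoresHiveArgs.builder()
            .server("example-sqlserver.database.windows.net") // hypothetical SQL server FQDN
            .databaseName("hive-metastore")                   // hypothetical existing database
            .username("sqladmin")                             // hypothetical admin username
            .password(Output.of("example-password"))          // prefer a secret Output in real code
            .build(); // throws MissingRequiredPropertyException if a required property is missing
        // Nest the Hive block under the cluster's metastores configuration;
        // SparkClusterMetastoresArgs is assumed to be the sibling generated type
        // with a matching builder.
        return SparkClusterMetastoresArgs.builder()
            .hive(hive)
            .build();
    }
}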