
com.pulumi.azurenative.machinelearningservices.outputs.BatchDeploymentResponse (azure-native)
A native Pulumi package for creating and managing Azure resources.
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.azurenative.machinelearningservices.outputs;
import com.pulumi.azurenative.machinelearningservices.outputs.BatchRetrySettingsResponse;
import com.pulumi.azurenative.machinelearningservices.outputs.CodeConfigurationResponse;
import com.pulumi.azurenative.machinelearningservices.outputs.DataPathAssetReferenceResponse;
import com.pulumi.azurenative.machinelearningservices.outputs.DeploymentResourceConfigurationResponse;
import com.pulumi.azurenative.machinelearningservices.outputs.IdAssetReferenceResponse;
import com.pulumi.azurenative.machinelearningservices.outputs.OutputPathAssetReferenceResponse;
import com.pulumi.core.annotations.CustomType;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Double;
import java.lang.Integer;
import java.lang.Object;
import java.lang.String;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;
@CustomType
public final class BatchDeploymentResponse {
/**
* @return Code configuration for the endpoint deployment.
*
*/
private @Nullable CodeConfigurationResponse codeConfiguration;
/**
* @return Compute target for batch inference operation.
*
*/
private @Nullable String compute;
/**
* @return Description of the endpoint deployment.
*
*/
private @Nullable String description;
/**
* @return ARM resource ID or AssetId of the environment specification for the endpoint deployment.
*
*/
private @Nullable String environmentId;
/**
* @return Environment variables configuration for the deployment.
*
*/
private @Nullable Map<String,String> environmentVariables;
/**
* @return Error threshold, if the error count for the entire input goes above this value,
* the batch inference will be aborted. Range is [-1, int.MaxValue].
* For FileDataset, this value is the count of file failures.
* For TabularDataset, this value is the count of record failures.
* If set to -1 (the lower bound), all failures during batch inference will be ignored.
*
*/
private @Nullable Integer errorThreshold;
/**
* @return Logging level for batch inference operation.
*
*/
private @Nullable String loggingLevel;
/**
* @return Indicates maximum number of parallelism per instance.
*
*/
private @Nullable Integer maxConcurrencyPerInstance;
/**
* @return Size of the mini-batch passed to each batch invocation.
* For FileDataset, this is the number of files per mini-batch.
* For TabularDataset, this is the size of the records in bytes, per mini-batch.
*
*/
private @Nullable Double miniBatchSize;
/**
* @return Reference to the model asset for the endpoint deployment.
*
*/
private @Nullable Object model;
/**
* @return Indicates how the output will be organized.
*
*/
private @Nullable String outputAction;
/**
* @return Customized output file name for append_row output action.
*
*/
private @Nullable String outputFileName;
/**
* @return Property dictionary. Properties can be added, but not removed or altered.
*
*/
private @Nullable Map<String,String> properties;
/**
* @return Provisioning state for the endpoint deployment.
*
*/
private String provisioningState;
/**
* @return Indicates compute configuration for the job.
* If not provided, will default to the defaults defined in ResourceConfiguration.
*
*/
private @Nullable DeploymentResourceConfigurationResponse resources;
/**
* @return Retry Settings for the batch inference operation.
* If not provided, will default to the defaults defined in BatchRetrySettings.
*
*/
private @Nullable BatchRetrySettingsResponse retrySettings;
private BatchDeploymentResponse() {}
/**
* @return Code configuration for the endpoint deployment.
*
*/
public Optional<CodeConfigurationResponse> codeConfiguration() {
return Optional.ofNullable(this.codeConfiguration);
}
/**
* @return Compute target for batch inference operation.
*
*/
public Optional<String> compute() {
return Optional.ofNullable(this.compute);
}
/**
* @return Description of the endpoint deployment.
*
*/
public Optional<String> description() {
return Optional.ofNullable(this.description);
}
/**
* @return ARM resource ID or AssetId of the environment specification for the endpoint deployment.
*
*/
public Optional<String> environmentId() {
return Optional.ofNullable(this.environmentId);
}
/**
* @return Environment variables configuration for the deployment.
*
*/
public Map<String,String> environmentVariables() {
return this.environmentVariables == null ? Map.of() : this.environmentVariables;
}
/**
* @return Error threshold, if the error count for the entire input goes above this value,
* the batch inference will be aborted. Range is [-1, int.MaxValue].
* For FileDataset, this value is the count of file failures.
* For TabularDataset, this value is the count of record failures.
* If set to -1 (the lower bound), all failures during batch inference will be ignored.
*
*/
public Optional<Integer> errorThreshold() {
return Optional.ofNullable(this.errorThreshold);
}
/**
* @return Logging level for batch inference operation.
*
*/
public Optional<String> loggingLevel() {
return Optional.ofNullable(this.loggingLevel);
}
/**
* @return Indicates maximum number of parallelism per instance.
*
*/
public Optional<Integer> maxConcurrencyPerInstance() {
return Optional.ofNullable(this.maxConcurrencyPerInstance);
}
/**
* @return Size of the mini-batch passed to each batch invocation.
* For FileDataset, this is the number of files per mini-batch.
* For TabularDataset, this is the size of the records in bytes, per mini-batch.
*
*/
public Optional<Double> miniBatchSize() {
return Optional.ofNullable(this.miniBatchSize);
}
/**
* @return Reference to the model asset for the endpoint deployment.
*
*/
public Optional<Object> model() {
return Optional.ofNullable(this.model);
}
/**
* @return Indicates how the output will be organized.
*
*/
public Optional<String> outputAction() {
return Optional.ofNullable(this.outputAction);
}
/**
* @return Customized output file name for append_row output action.
*
*/
public Optional<String> outputFileName() {
return Optional.ofNullable(this.outputFileName);
}
/**
* @return Property dictionary. Properties can be added, but not removed or altered.
*
*/
public Map<String,String> properties() {
return this.properties == null ? Map.of() : this.properties;
}
/**
* @return Provisioning state for the endpoint deployment.
*
*/
public String provisioningState() {
return this.provisioningState;
}
/**
* @return Indicates compute configuration for the job.
* If not provided, will default to the defaults defined in ResourceConfiguration.
*
*/
public Optional<DeploymentResourceConfigurationResponse> resources() {
return Optional.ofNullable(this.resources);
}
/**
* @return Retry Settings for the batch inference operation.
* If not provided, will default to the defaults defined in BatchRetrySettings.
*
*/
public Optional<BatchRetrySettingsResponse> retrySettings() {
return Optional.ofNullable(this.retrySettings);
}
// The generated builder() factory and nested Builder class are omitted from this listing.
}
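Usage sketch (not part of the generated file): code that receives a BatchDeploymentResponse reads its fields through the Optional-returning accessors shown above, while environmentVariables() and properties() fall back to empty maps and provisioningState() is returned directly. The class and method names below (BatchDeploymentInspector, summarize) are illustrative only, and how the instance is obtained from a Pulumi program is assumed rather than shown.

import com.pulumi.azurenative.machinelearningservices.outputs.BatchDeploymentResponse;

public final class BatchDeploymentInspector {
    private BatchDeploymentInspector() {}

    // Hypothetical helper: builds a one-line summary from the accessors defined above.
    public static String summarize(BatchDeploymentResponse deployment) {
        StringBuilder sb = new StringBuilder();
        // provisioningState is the only accessor on this type that is not wrapped in Optional.
        sb.append("provisioningState=").append(deployment.provisioningState());
        // Optional-returning accessors are appended only when a value is present.
        deployment.compute().ifPresent(c -> sb.append(", compute=").append(c));
        deployment.errorThreshold().ifPresent(t -> sb.append(", errorThreshold=").append(t));
        deployment.miniBatchSize().ifPresent(s -> sb.append(", miniBatchSize=").append(s));
        // environmentVariables() never returns null; it falls back to Map.of().
        sb.append(", envVarCount=").append(deployment.environmentVariables().size());
        return sb.toString();
    }
}

In a Pulumi program this value typically arrives wrapped in an Output, so a helper like this would usually be applied inside applyValue(...) rather than called on a raw instance.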