com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs Maven / Gradle / Ivy

A Pulumi package for creating and managing Amazon Web Services (AWS) cloud resources.

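A minimal sketch of constructing these args with the generated builder shown below (the S3 URI and optional values are illustrative assumptions, not values taken from this file):

import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs;

// Both required properties must be set, or build() throws
// MissingRequiredPropertyException.
var batchTransformInput = DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs.builder()
    .dataCapturedDestinationS3Uri("s3://example-bucket/data-capture") // placeholder URI
    .datasetFormat(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs.builder()
        .build()) // configure its CSV/JSON fields per the provider docs
    .localPath("/opt/ml/processing/input")     // optional; shown at its default
    .s3DataDistributionType("FullyReplicated") // optional; or ShardedByS3Key
    .s3InputMode("File")                       // optional; or Pipe
    .build();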
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.aws.sagemaker.inputs;

import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.String;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;


public final class DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs extends com.pulumi.resources.ResourceArgs {

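    /**
     * A shared instance with no properties set.
     */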
    public static final DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs Empty = new DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs();

    /**
     * The Amazon S3 location being used to capture the data.
     * 
     */
    @Import(name="dataCapturedDestinationS3Uri", required=true)
    private Output<String> dataCapturedDestinationS3Uri;

    /**
     * @return The Amazon S3 location being used to capture the data.
     * 
     */
    public Output<String> dataCapturedDestinationS3Uri() {
        return this.dataCapturedDestinationS3Uri;
    }

    /**
     * The dataset format for your batch transform job. Fields are documented below.
     * 
     */
    @Import(name="datasetFormat", required=true)
    private Output<DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs> datasetFormat;

    /**
     * @return The dataset format for your batch transform job. Fields are documented below.
     * 
     */
    public Output<DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs> datasetFormat() {
        return this.datasetFormat;
    }
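    // Note: the dataset format is itself a generated args type with its own
    // builder; configure its fields (e.g. CSV or JSON settings, per the provider
    // docs) before passing it here.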

    /**
     * Path on the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
     * 
     */
    @Import(name="localPath")
    private @Nullable Output<String> localPath;

    /**
     * @return Path on the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
     * 
     */
    public Optional<Output<String>> localPath() {
        return Optional.ofNullable(this.localPath);
    }

    /**
     * Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
     * 
     */
    @Import(name="s3DataDistributionType")
    private @Nullable Output<String> s3DataDistributionType;

    /**
     * @return Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
     * 
     */
    public Optional<Output<String>> s3DataDistributionType() {
        return Optional.ofNullable(this.s3DataDistributionType);
    }

    /**
     * Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
     * 
     */
    @Import(name="s3InputMode")
    private @Nullable Output<String> s3InputMode;

    /**
     * @return Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
     * 
     */
    public Optional<Output<String>> s3InputMode() {
        return Optional.ofNullable(this.s3InputMode);
    }

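    // The no-arg constructor backs the Empty instance and the builder; the copy
    // constructor below seeds builder(defaults) with an existing instance's values.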
    private DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs() {}

    private DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs $) {
        this.dataCapturedDestinationS3Uri = $.dataCapturedDestinationS3Uri;
        this.datasetFormat = $.datasetFormat;
        this.localPath = $.localPath;
        this.s3DataDistributionType = $.s3DataDistributionType;
        this.s3InputMode = $.s3InputMode;
    }

    public static Builder builder() {
        return new Builder();
    }
    public static Builder builder(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs defaults) {
        return new Builder(defaults);
    }

    public static final class Builder {
        private DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs $;

        public Builder() {
            $ = new DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs();
        }

        public Builder(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs defaults) {
            $ = new DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs(Objects.requireNonNull(defaults));
        }

        /**
         * @param dataCapturedDestinationS3Uri The Amazon S3 location being used to capture the data.
         * 
         * @return builder
         * 
         */
        public Builder dataCapturedDestinationS3Uri(Output<String> dataCapturedDestinationS3Uri) {
            $.dataCapturedDestinationS3Uri = dataCapturedDestinationS3Uri;
            return this;
        }

        /**
         * @param dataCapturedDestinationS3Uri The Amazon S3 location being used to capture the data.
         * 
         * @return builder
         * 
         */
        public Builder dataCapturedDestinationS3Uri(String dataCapturedDestinationS3Uri) {
            return dataCapturedDestinationS3Uri(Output.of(dataCapturedDestinationS3Uri));
        }

        /**
         * @param datasetFormat The dataset format for your batch transform job. Fields are documented below.
         * 
         * @return builder
         * 
         */
        public Builder datasetFormat(Output<DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs> datasetFormat) {
            $.datasetFormat = datasetFormat;
            return this;
        }

        /**
         * @param datasetFormat The dataset format for your batch transform job. Fields are documented below.
         * 
         * @return builder
         * 
         */
        public Builder datasetFormat(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs datasetFormat) {
            return datasetFormat(Output.of(datasetFormat));
        }

        /**
         * @param localPath Path on the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
         * 
         * @return builder
         * 
         */
        public Builder localPath(@Nullable Output<String> localPath) {
            $.localPath = localPath;
            return this;
        }

        /**
         * @param localPath Path on the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
         * 
         * @return builder
         * 
         */
        public Builder localPath(String localPath) {
            return localPath(Output.of(localPath));
        }

        /**
         * @param s3DataDistributionType Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
         * 
         * @return builder
         * 
         */
        public Builder s3DataDistributionType(@Nullable Output<String> s3DataDistributionType) {
            $.s3DataDistributionType = s3DataDistributionType;
            return this;
        }

        /**
         * @param s3DataDistributionType Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
         * 
         * @return builder
         * 
         */
        public Builder s3DataDistributionType(String s3DataDistributionType) {
            return s3DataDistributionType(Output.of(s3DataDistributionType));
        }

        /**
         * @param s3InputMode Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
         * 
         * @return builder
         * 
         */
        public Builder s3InputMode(@Nullable Output<String> s3InputMode) {
            $.s3InputMode = s3InputMode;
            return this;
        }

        /**
         * @param s3InputMode Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
         * 
         * @return builder
         * 
         */
        public Builder s3InputMode(String s3InputMode) {
            return s3InputMode(Output.of(s3InputMode));
        }

        public DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs build() {
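            // Fail fast: both properties checked below are required by the provider schema.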
            if ($.dataCapturedDestinationS3Uri == null) {
                throw new MissingRequiredPropertyException("DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs", "dataCapturedDestinationS3Uri");
            }
            if ($.datasetFormat == null) {
                throw new MissingRequiredPropertyException("DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs", "datasetFormat");
            }
            return $;
        }
    }

}
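An existing instance can also seed a new builder via builder(defaults); a hedged sketch reusing the instance built in the example above (the override value is illustrative):

var sharded = DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs
    .builder(batchTransformInput)
    .s3DataDistributionType("ShardedByS3Key") // override a single optional property
    .build();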



