
software.amazon.awssdk.services.personalize.model.CreateDatasetImportJobRequest Maven / Gradle / Ivy


The AWS Java SDK for Personalize module holds the client classes that are used for communicating with Personalize.
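Before the generated source below, here is a minimal usage sketch showing how this request type is typically built and passed to the synchronous Personalize client. It is a hedged example, not part of this file: the job name, ARNs, S3 path, and region are placeholders, and the client setup is assumed from the standard AWS SDK v2 pattern.

import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.personalize.PersonalizeClient;
import software.amazon.awssdk.services.personalize.model.CreateDatasetImportJobRequest;
import software.amazon.awssdk.services.personalize.model.CreateDatasetImportJobResponse;
import software.amazon.awssdk.services.personalize.model.ImportMode;

public class CreateImportJobExample {
    public static void main(String[] args) {
        // Placeholder values -- substitute your own resources.
        String datasetArn = "arn:aws:personalize:us-east-1:123456789012:dataset/my-group/INTERACTIONS";
        String roleArn = "arn:aws:iam::123456789012:role/PersonalizeS3Role";

        try (PersonalizeClient personalize = PersonalizeClient.builder()
                .region(Region.US_EAST_1)
                .build()) {

            CreateDatasetImportJobRequest request = CreateDatasetImportJobRequest.builder()
                    .jobName("interactions-import-1")
                    .datasetArn(datasetArn)
                    // Consumer-style convenience builder for the nested DataSource
                    .dataSource(ds -> ds.dataLocation("s3://my-bucket/interactions.csv"))
                    .roleArn(roleArn)
                    .importMode(ImportMode.FULL)
                    .build();

            CreateDatasetImportJobResponse response = personalize.createDatasetImportJob(request);
            System.out.println("Import job ARN: " + response.datasetImportJobArn());
        }
    }
}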

/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */

package software.amazon.awssdk.services.personalize.model;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.awscore.AwsRequestOverrideConfiguration;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.ListTrait;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.core.util.DefaultSdkAutoConstructList;
import software.amazon.awssdk.core.util.SdkAutoConstructList;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;

/**
 */
@Generated("software.amazon.awssdk:codegen")
public final class CreateDatasetImportJobRequest extends PersonalizeRequest implements
        ToCopyableBuilder<CreateDatasetImportJobRequest.Builder, CreateDatasetImportJobRequest> {
    private static final SdkField<String> JOB_NAME_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("jobName").getter(getter(CreateDatasetImportJobRequest::jobName)).setter(setter(Builder::jobName))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("jobName").build()).build();

    private static final SdkField<String> DATASET_ARN_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("datasetArn").getter(getter(CreateDatasetImportJobRequest::datasetArn))
            .setter(setter(Builder::datasetArn))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("datasetArn").build()).build();

    private static final SdkField<DataSource> DATA_SOURCE_FIELD = SdkField.<DataSource> builder(MarshallingType.SDK_POJO)
            .memberName("dataSource").getter(getter(CreateDatasetImportJobRequest::dataSource))
            .setter(setter(Builder::dataSource)).constructor(DataSource::builder)
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("dataSource").build()).build();

    private static final SdkField<String> ROLE_ARN_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("roleArn").getter(getter(CreateDatasetImportJobRequest::roleArn)).setter(setter(Builder::roleArn))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("roleArn").build()).build();

    private static final SdkField<List<Tag>> TAGS_FIELD = SdkField
            .<List<Tag>> builder(MarshallingType.LIST)
            .memberName("tags")
            .getter(getter(CreateDatasetImportJobRequest::tags))
            .setter(setter(Builder::tags))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("tags").build(),
                    ListTrait
                            .builder()
                            .memberLocationName(null)
                            .memberFieldInfo(
                                    SdkField.<Tag> builder(MarshallingType.SDK_POJO)
                                            .constructor(Tag::builder)
                                            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD)
                                                    .locationName("member").build()).build()).build()).build();

    private static final SdkField<String> IMPORT_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("importMode").getter(getter(CreateDatasetImportJobRequest::importModeAsString))
            .setter(setter(Builder::importMode))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("importMode").build()).build();

    private static final SdkField<Boolean> PUBLISH_ATTRIBUTION_METRICS_TO_S3_FIELD = SdkField
            .<Boolean> builder(MarshallingType.BOOLEAN)
            .memberName("publishAttributionMetricsToS3")
            .getter(getter(CreateDatasetImportJobRequest::publishAttributionMetricsToS3))
            .setter(setter(Builder::publishAttributionMetricsToS3))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("publishAttributionMetricsToS3")
                    .build()).build();

    private static final List<SdkField<?>> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(JOB_NAME_FIELD,
            DATASET_ARN_FIELD, DATA_SOURCE_FIELD, ROLE_ARN_FIELD, TAGS_FIELD, IMPORT_MODE_FIELD,
            PUBLISH_ATTRIBUTION_METRICS_TO_S3_FIELD));

    private final String jobName;

    private final String datasetArn;

    private final DataSource dataSource;

    private final String roleArn;

    private final List<Tag> tags;

    private final String importMode;

    private final Boolean publishAttributionMetricsToS3;

    private CreateDatasetImportJobRequest(BuilderImpl builder) {
        super(builder);
        this.jobName = builder.jobName;
        this.datasetArn = builder.datasetArn;
        this.dataSource = builder.dataSource;
        this.roleArn = builder.roleArn;
        this.tags = builder.tags;
        this.importMode = builder.importMode;
        this.publishAttributionMetricsToS3 = builder.publishAttributionMetricsToS3;
    }

    /**
     * <p>
     * The name for the dataset import job.
     * </p>
     * 
     * @return The name for the dataset import job.
     */
    public final String jobName() {
        return jobName;
    }

    /**
     * <p>
     * The ARN of the dataset that receives the imported data.
     * </p>
     * 
     * @return The ARN of the dataset that receives the imported data.
     */
    public final String datasetArn() {
        return datasetArn;
    }

    /**
     * <p>
     * The Amazon S3 bucket that contains the training data to import.
     * </p>
     * 
     * @return The Amazon S3 bucket that contains the training data to import.
     */
    public final DataSource dataSource() {
        return dataSource;
    }

    /**
     * <p>
     * The ARN of the IAM role that has permissions to read from the Amazon S3 data source.
     * </p>
     * 
     * @return The ARN of the IAM role that has permissions to read from the Amazon S3 data source.
     */
    public final String roleArn() {
        return roleArn;
    }

    /**
     * For responses, this returns true if the service returned a value for the Tags property. This DOES NOT check
     * that the value is non-empty (for which, you should check the {@code isEmpty()} method on the property). This is
     * useful because the SDK will never return a null collection or map, but you may need to differentiate between
     * the service returning nothing (or null) and the service returning an empty collection or map. For requests,
     * this returns true if a value for the property was specified in the request builder, and false if a value was
     * not specified.
     */
    public final boolean hasTags() {
        return tags != null && !(tags instanceof SdkAutoConstructList);
    }

    /**
     * <p>
     * A list of tags to apply to the dataset import job.
     * </p>
     * <p>
     * Attempts to modify the collection returned by this method will result in an UnsupportedOperationException.
     * </p>
     * <p>
     * This method will never return null. If you would like to know whether the service returned this field (so that
     * you can differentiate between null and empty), you can use the {@link #hasTags} method.
     * </p>
     * 
     * @return A list of tags to apply to the dataset import job.
     */
    public final List<Tag> tags() {
        return tags;
    }

    /**
     * <p>
     * Specify how to add the new records to an existing dataset. The default import mode is <code>FULL</code>. If you
     * haven't imported bulk records into the dataset previously, you can only specify <code>FULL</code>.
     * </p>
     * <ul>
     * <li>
     * <p>
     * Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported individually is
     * not replaced.
     * </p>
     * </li>
     * <li>
     * <p>
     * Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset. Amazon
     * Personalize replaces any record with the same ID with the new one.
     * </p>
     * </li>
     * </ul>
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #importMode} will
     * return {@link ImportMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #importModeAsString}.
     * </p>
     * 
     * @return Specify how to add the new records to an existing dataset. The default import mode is
     *         <code>FULL</code>. If you haven't imported bulk records into the dataset previously, you can only
     *         specify <code>FULL</code>.
     *         <ul>
     *         <li>
     *         <p>
     *         Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
     *         individually is not replaced.
     *         </p>
     *         </li>
     *         <li>
     *         <p>
     *         Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset.
     *         Amazon Personalize replaces any record with the same ID with the new one.
     *         </p>
     *         </li>
     *         </ul>
     * @see ImportMode
     */
    public final ImportMode importMode() {
        return ImportMode.fromValue(importMode);
    }

    /**
     * <p>
     * Specify how to add the new records to an existing dataset. The default import mode is <code>FULL</code>. If you
     * haven't imported bulk records into the dataset previously, you can only specify <code>FULL</code>.
     * </p>
     * <ul>
     * <li>
     * <p>
     * Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported individually is
     * not replaced.
     * </p>
     * </li>
     * <li>
     * <p>
     * Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset. Amazon
     * Personalize replaces any record with the same ID with the new one.
     * </p>
     * </li>
     * </ul>
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #importMode} will
     * return {@link ImportMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #importModeAsString}.
     * </p>
     * 
     * @return Specify how to add the new records to an existing dataset. The default import mode is
     *         <code>FULL</code>. If you haven't imported bulk records into the dataset previously, you can only
     *         specify <code>FULL</code>.
     *         <ul>
     *         <li>
     *         <p>
     *         Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
     *         individually is not replaced.
     *         </p>
     *         </li>
     *         <li>
     *         <p>
     *         Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset.
     *         Amazon Personalize replaces any record with the same ID with the new one.
     *         </p>
     *         </li>
     *         </ul>
     * @see ImportMode
     */
    public final String importModeAsString() {
        return importMode;
    }

    /**
     * <p>
     * If you created a metric attribution, specify whether to publish metrics for this import job to Amazon S3
     * </p>
     * 
     * @return If you created a metric attribution, specify whether to publish metrics for this import job to Amazon
     *         S3
     */
    public final Boolean publishAttributionMetricsToS3() {
        return publishAttributionMetricsToS3;
    }

    @Override
    public Builder toBuilder() {
        return new BuilderImpl(this);
    }

    public static Builder builder() {
        return new BuilderImpl();
    }

    public static Class<? extends Builder> serializableBuilderClass() {
        return BuilderImpl.class;
    }

    @Override
    public final int hashCode() {
        int hashCode = 1;
        hashCode = 31 * hashCode + super.hashCode();
        hashCode = 31 * hashCode + Objects.hashCode(jobName());
        hashCode = 31 * hashCode + Objects.hashCode(datasetArn());
        hashCode = 31 * hashCode + Objects.hashCode(dataSource());
        hashCode = 31 * hashCode + Objects.hashCode(roleArn());
        hashCode = 31 * hashCode + Objects.hashCode(hasTags() ? tags() : null);
        hashCode = 31 * hashCode + Objects.hashCode(importModeAsString());
        hashCode = 31 * hashCode + Objects.hashCode(publishAttributionMetricsToS3());
        return hashCode;
    }

    @Override
    public final boolean equals(Object obj) {
        return super.equals(obj) && equalsBySdkFields(obj);
    }

    @Override
    public final boolean equalsBySdkFields(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof CreateDatasetImportJobRequest)) {
            return false;
        }
        CreateDatasetImportJobRequest other = (CreateDatasetImportJobRequest) obj;
        return Objects.equals(jobName(), other.jobName()) && Objects.equals(datasetArn(), other.datasetArn())
                && Objects.equals(dataSource(), other.dataSource()) && Objects.equals(roleArn(), other.roleArn())
                && hasTags() == other.hasTags() && Objects.equals(tags(), other.tags())
                && Objects.equals(importModeAsString(), other.importModeAsString())
                && Objects.equals(publishAttributionMetricsToS3(), other.publishAttributionMetricsToS3());
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will
     * be redacted from this string using a placeholder value.
     */
    @Override
    public final String toString() {
        return ToString.builder("CreateDatasetImportJobRequest").add("JobName", jobName()).add("DatasetArn", datasetArn())
                .add("DataSource", dataSource()).add("RoleArn", roleArn()).add("Tags", hasTags() ? tags() : null)
                .add("ImportMode", importModeAsString()).add("PublishAttributionMetricsToS3", publishAttributionMetricsToS3())
                .build();
    }

    public final <T> Optional<T> getValueForField(String fieldName, Class<T> clazz) {
        switch (fieldName) {
        case "jobName":
            return Optional.ofNullable(clazz.cast(jobName()));
        case "datasetArn":
            return Optional.ofNullable(clazz.cast(datasetArn()));
        case "dataSource":
            return Optional.ofNullable(clazz.cast(dataSource()));
        case "roleArn":
            return Optional.ofNullable(clazz.cast(roleArn()));
        case "tags":
            return Optional.ofNullable(clazz.cast(tags()));
        case "importMode":
            return Optional.ofNullable(clazz.cast(importModeAsString()));
        case "publishAttributionMetricsToS3":
            return Optional.ofNullable(clazz.cast(publishAttributionMetricsToS3()));
        default:
            return Optional.empty();
        }
    }

    @Override
    public final List<SdkField<?>> sdkFields() {
        return SDK_FIELDS;
    }

    private static <T> Function<Object, T> getter(Function<CreateDatasetImportJobRequest, T> g) {
        return obj -> g.apply((CreateDatasetImportJobRequest) obj);
    }

    private static <T> BiConsumer<Object, T> setter(BiConsumer<Builder, T> s) {
        return (obj, val) -> s.accept((Builder) obj, val);
    }

    public interface Builder extends PersonalizeRequest.Builder, SdkPojo,
            CopyableBuilder<Builder, CreateDatasetImportJobRequest> {
        /**
         * <p>
         * The name for the dataset import job.
         * </p>
         * 
         * @param jobName
         *        The name for the dataset import job.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder jobName(String jobName);

        /**
         * <p>
         * The ARN of the dataset that receives the imported data.
         * </p>
         * 
         * @param datasetArn
         *        The ARN of the dataset that receives the imported data.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder datasetArn(String datasetArn);

        /**
         * <p>
         * The Amazon S3 bucket that contains the training data to import.
         * </p>
         * 
         * @param dataSource
         *        The Amazon S3 bucket that contains the training data to import.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder dataSource(DataSource dataSource);

        /**
         * <p>
         * The Amazon S3 bucket that contains the training data to import.
         * </p>
         * This is a convenience method that creates an instance of the {@link DataSource.Builder} avoiding the need
         * to create one manually via {@link DataSource#builder()}.
         *
         * <p>
         * When the {@link Consumer} completes, {@link DataSource.Builder#build()} is called immediately and its
         * result is passed to {@link #dataSource(DataSource)}.
         * 
         * @param dataSource
         *        a consumer that will call methods on {@link DataSource.Builder}
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see #dataSource(DataSource)
         */
        default Builder dataSource(Consumer<DataSource.Builder> dataSource) {
            return dataSource(DataSource.builder().applyMutation(dataSource).build());
        }

        /**
         * <p>
         * The ARN of the IAM role that has permissions to read from the Amazon S3 data source.
         * </p>
         * 
         * @param roleArn
         *        The ARN of the IAM role that has permissions to read from the Amazon S3 data source.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder roleArn(String roleArn);

        /**
         * <p>
         * A list of tags to apply to the dataset import job.
         * </p>
         * 
         * @param tags
         *        A list of tags to apply to the dataset import job.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder tags(Collection<Tag> tags);

        /**
         * <p>
         * A list of tags to apply to the dataset import job.
         * </p>
         * 
         * @param tags
         *        A list of tags to apply to the dataset import job.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder tags(Tag... tags);

        /**
         * <p>
         * A list of tags to apply to the dataset import job.
         * </p>
         * This is a convenience method that creates an instance of the
         * {@link software.amazon.awssdk.services.personalize.model.Tag.Builder} avoiding the need to create one
         * manually via {@link software.amazon.awssdk.services.personalize.model.Tag#builder()}.
         *
         * <p>
         * When the {@link Consumer} completes,
         * {@link software.amazon.awssdk.services.personalize.model.Tag.Builder#build()} is called immediately and
         * its result is passed to {@link #tags(List)}.
         * 
         * @param tags
         *        a consumer that will call methods on
         *        {@link software.amazon.awssdk.services.personalize.model.Tag.Builder}
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see #tags(java.util.Collection)
         */
        Builder tags(Consumer<Tag.Builder>... tags);

        /**
         * <p>
         * Specify how to add the new records to an existing dataset. The default import mode is <code>FULL</code>.
         * If you haven't imported bulk records into the dataset previously, you can only specify <code>FULL</code>.
         * </p>
         * <ul>
         * <li>
         * <p>
         * Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
         * individually is not replaced.
         * </p>
         * </li>
         * <li>
         * <p>
         * Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset. Amazon
         * Personalize replaces any record with the same ID with the new one.
         * </p>
         * </li>
         * </ul>
         * 
         * @param importMode
         *        Specify how to add the new records to an existing dataset. The default import mode is
         *        <code>FULL</code>. If you haven't imported bulk records into the dataset previously, you can only
         *        specify <code>FULL</code>.
         *        <ul>
         *        <li>
         *        <p>
         *        Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
         *        individually is not replaced.
         *        </p>
         *        </li>
         *        <li>
         *        <p>
         *        Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset.
         *        Amazon Personalize replaces any record with the same ID with the new one.
         *        </p>
         *        </li>
         *        </ul>
         * @see ImportMode
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see ImportMode
         */
        Builder importMode(String importMode);

        /**
         * <p>
         * Specify how to add the new records to an existing dataset. The default import mode is <code>FULL</code>.
         * If you haven't imported bulk records into the dataset previously, you can only specify <code>FULL</code>.
         * </p>
         * <ul>
         * <li>
         * <p>
         * Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
         * individually is not replaced.
         * </p>
         * </li>
         * <li>
         * <p>
         * Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset. Amazon
         * Personalize replaces any record with the same ID with the new one.
         * </p>
         * </li>
         * </ul>
         * 
         * @param importMode
         *        Specify how to add the new records to an existing dataset. The default import mode is
         *        <code>FULL</code>. If you haven't imported bulk records into the dataset previously, you can only
         *        specify <code>FULL</code>.
         *        <ul>
         *        <li>
         *        <p>
         *        Specify <code>FULL</code> to overwrite all existing bulk data in your dataset. Data you imported
         *        individually is not replaced.
         *        </p>
         *        </li>
         *        <li>
         *        <p>
         *        Specify <code>INCREMENTAL</code> to append the new records to the existing data in your dataset.
         *        Amazon Personalize replaces any record with the same ID with the new one.
         *        </p>
         *        </li>
         *        </ul>
         * @see ImportMode
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see ImportMode
         */
        Builder importMode(ImportMode importMode);

        /**
         * <p>
         * If you created a metric attribution, specify whether to publish metrics for this import job to Amazon S3
         * </p>
         * 
         * @param publishAttributionMetricsToS3
         *        If you created a metric attribution, specify whether to publish metrics for this import job to
         *        Amazon S3
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder publishAttributionMetricsToS3(Boolean publishAttributionMetricsToS3);

        @Override
        Builder overrideConfiguration(AwsRequestOverrideConfiguration overrideConfiguration);

        @Override
        Builder overrideConfiguration(Consumer<AwsRequestOverrideConfiguration.Builder> builderConsumer);
    }

    static final class BuilderImpl extends PersonalizeRequest.BuilderImpl implements Builder {
        private String jobName;

        private String datasetArn;

        private DataSource dataSource;

        private String roleArn;

        private List<Tag> tags = DefaultSdkAutoConstructList.getInstance();

        private String importMode;

        private Boolean publishAttributionMetricsToS3;

        private BuilderImpl() {
        }

        private BuilderImpl(CreateDatasetImportJobRequest model) {
            super(model);
            jobName(model.jobName);
            datasetArn(model.datasetArn);
            dataSource(model.dataSource);
            roleArn(model.roleArn);
            tags(model.tags);
            importMode(model.importMode);
            publishAttributionMetricsToS3(model.publishAttributionMetricsToS3);
        }

        public final String getJobName() {
            return jobName;
        }

        public final void setJobName(String jobName) {
            this.jobName = jobName;
        }

        @Override
        public final Builder jobName(String jobName) {
            this.jobName = jobName;
            return this;
        }

        public final String getDatasetArn() {
            return datasetArn;
        }

        public final void setDatasetArn(String datasetArn) {
            this.datasetArn = datasetArn;
        }

        @Override
        public final Builder datasetArn(String datasetArn) {
            this.datasetArn = datasetArn;
            return this;
        }

        public final DataSource.Builder getDataSource() {
            return dataSource != null ? dataSource.toBuilder() : null;
        }

        public final void setDataSource(DataSource.BuilderImpl dataSource) {
            this.dataSource = dataSource != null ? dataSource.build() : null;
        }

        @Override
        public final Builder dataSource(DataSource dataSource) {
            this.dataSource = dataSource;
            return this;
        }

        public final String getRoleArn() {
            return roleArn;
        }

        public final void setRoleArn(String roleArn) {
            this.roleArn = roleArn;
        }

        @Override
        public final Builder roleArn(String roleArn) {
            this.roleArn = roleArn;
            return this;
        }

        public final List<Tag.Builder> getTags() {
            List<Tag.Builder> result = TagsCopier.copyToBuilder(this.tags);
            if (result instanceof SdkAutoConstructList) {
                return null;
            }
            return result;
        }

        public final void setTags(Collection<Tag.BuilderImpl> tags) {
            this.tags = TagsCopier.copyFromBuilder(tags);
        }

        @Override
        public final Builder tags(Collection<Tag> tags) {
            this.tags = TagsCopier.copy(tags);
            return this;
        }

        @Override
        @SafeVarargs
        public final Builder tags(Tag... tags) {
            tags(Arrays.asList(tags));
            return this;
        }

        @Override
        @SafeVarargs
        public final Builder tags(Consumer<Tag.Builder>... tags) {
            tags(Stream.of(tags).map(c -> Tag.builder().applyMutation(c).build()).collect(Collectors.toList()));
            return this;
        }

        public final String getImportMode() {
            return importMode;
        }

        public final void setImportMode(String importMode) {
            this.importMode = importMode;
        }

        @Override
        public final Builder importMode(String importMode) {
            this.importMode = importMode;
            return this;
        }

        @Override
        public final Builder importMode(ImportMode importMode) {
            this.importMode(importMode == null ? null : importMode.toString());
            return this;
        }

        public final Boolean getPublishAttributionMetricsToS3() {
            return publishAttributionMetricsToS3;
        }

        public final void setPublishAttributionMetricsToS3(Boolean publishAttributionMetricsToS3) {
            this.publishAttributionMetricsToS3 = publishAttributionMetricsToS3;
        }

        @Override
        public final Builder publishAttributionMetricsToS3(Boolean publishAttributionMetricsToS3) {
            this.publishAttributionMetricsToS3 = publishAttributionMetricsToS3;
            return this;
        }

        @Override
        public Builder overrideConfiguration(AwsRequestOverrideConfiguration overrideConfiguration) {
            super.overrideConfiguration(overrideConfiguration);
            return this;
        }

        @Override
        public Builder overrideConfiguration(Consumer<AwsRequestOverrideConfiguration.Builder> builderConsumer) {
            super.overrideConfiguration(builderConsumer);
            return this;
        }

        @Override
        public CreateDatasetImportJobRequest build() {
            return new CreateDatasetImportJobRequest(this);
        }

        @Override
        public List<SdkField<?>> sdkFields() {
            return SDK_FIELDS;
        }
    }
}
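Because the class implements ToCopyableBuilder, an existing request can be copied and modified with toBuilder(), which is handy for a follow-up incremental import. Below is a small, hedged sketch of that pattern; the job-name suffix and the tag key/value are illustrative placeholders, not values from this file.

import software.amazon.awssdk.services.personalize.model.CreateDatasetImportJobRequest;
import software.amazon.awssdk.services.personalize.model.ImportMode;

final class IncrementalImportExample {
    /**
     * Derives a follow-up import request from an earlier one via toBuilder(),
     * switching the import mode so new records are appended rather than
     * overwriting existing bulk data.
     */
    static CreateDatasetImportJobRequest incrementalFrom(CreateDatasetImportJobRequest baseRequest) {
        return baseRequest.toBuilder()
                .jobName(baseRequest.jobName() + "-incremental")
                // Append new records instead of replacing existing bulk data
                .importMode(ImportMode.INCREMENTAL)
                // Consumer varargs convenience overload for nested Tag builders
                .tags(t -> t.tagKey("team").tagValue("recommendations"))
                .build();
    }
}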



