
com.amazonaws.services.comprehend.model.DatasetInputDataConfig Maven / Gradle / Ivy


The AWS Java SDK for Amazon Comprehend module holds the client classes that are used for communicating with the Amazon Comprehend service.

There is a newer version: 1.12.772
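The module description above refers to the client classes used to call Amazon Comprehend. As a rough, hedged sketch of where the model class in the source listing below fits, the snippet builds a DatasetInputDataConfig and attaches it to a dataset-creation request. The AmazonComprehendClientBuilder, CreateDatasetRequest, and createDataset names are assumed to come from the same SDK module (they are not part of this listing), and the ARN and dataset name are placeholders.

import com.amazonaws.services.comprehend.AmazonComprehend;
import com.amazonaws.services.comprehend.AmazonComprehendClientBuilder;
import com.amazonaws.services.comprehend.model.CreateDatasetRequest;
import com.amazonaws.services.comprehend.model.DatasetDataFormat;
import com.amazonaws.services.comprehend.model.DatasetInputDataConfig;

public class CreateDatasetSketch {
    public static void main(String[] args) {
        // Default client; resolves credentials and region from the standard provider chain (usual SDK v1 pattern).
        AmazonComprehend comprehend = AmazonComprehendClientBuilder.defaultClient();

        // The model object from the listing below, populated with its fluent setters.
        DatasetInputDataConfig inputDataConfig = new DatasetInputDataConfig()
                .withDataFormat(DatasetDataFormat.COMPREHEND_CSV);

        // Assumed request shape: CreateDatasetRequest carries a DatasetInputDataConfig.
        CreateDatasetRequest request = new CreateDatasetRequest()
                .withFlywheelArn("arn:aws:comprehend:us-east-1:111122223333:flywheel/example") // placeholder ARN
                .withDatasetName("example-dataset") // placeholder name
                .withInputDataConfig(inputDataConfig);

        comprehend.createDataset(request); // result handling omitted in this sketch
    }
}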
/*
 * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.comprehend.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * Specifies the format and location of the input data for the dataset.
 *
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DatasetInputDataConfig implements Serializable, Cloneable, StructuredPojo {

    /**
     * A list of augmented manifest files that provide training data for your custom model. An augmented manifest file
     * is a labeled dataset that is produced by Amazon SageMaker Ground Truth.
     */
    private java.util.List<DatasetAugmentedManifestsListItem> augmentedManifests;
    /**
     * COMPREHEND_CSV: The data format is a two-column CSV file, where the first column contains labels and the second
     * column contains documents.
     *
     * AUGMENTED_MANIFEST: The data format
     */
    private String dataFormat;
    /**
     * The input properties for training a document classifier model.
     *
     * For more information on how the input file is formatted, see Preparing training data in the Comprehend
     * Developer Guide.
     */
    private DatasetDocumentClassifierInputDataConfig documentClassifierInputDataConfig;
    /**
     * The input properties for training an entity recognizer model.
     */
    private DatasetEntityRecognizerInputDataConfig entityRecognizerInputDataConfig;

    /**
     * @return A list of augmented manifest files that provide training data for your custom model. An augmented
     *         manifest file is a labeled dataset that is produced by Amazon SageMaker Ground Truth.
     */
    public java.util.List<DatasetAugmentedManifestsListItem> getAugmentedManifests() {
        return augmentedManifests;
    }

    /**
     * @param augmentedManifests
     *        A list of augmented manifest files that provide training data for your custom model. An augmented
     *        manifest file is a labeled dataset that is produced by Amazon SageMaker Ground Truth.
     */
    public void setAugmentedManifests(java.util.Collection<DatasetAugmentedManifestsListItem> augmentedManifests) {
        if (augmentedManifests == null) {
            this.augmentedManifests = null;
            return;
        }
        this.augmentedManifests = new java.util.ArrayList<DatasetAugmentedManifestsListItem>(augmentedManifests);
    }

    /**
     * NOTE: This method appends the values to the existing list (if any). Use
     * {@link #setAugmentedManifests(java.util.Collection)} or {@link #withAugmentedManifests(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param augmentedManifests
     *        A list of augmented manifest files that provide training data for your custom model. An augmented
     *        manifest file is a labeled dataset that is produced by Amazon SageMaker Ground Truth.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetInputDataConfig withAugmentedManifests(DatasetAugmentedManifestsListItem... augmentedManifests) {
        if (this.augmentedManifests == null) {
            setAugmentedManifests(new java.util.ArrayList<DatasetAugmentedManifestsListItem>(augmentedManifests.length));
        }
        for (DatasetAugmentedManifestsListItem ele : augmentedManifests) {
            this.augmentedManifests.add(ele);
        }
        return this;
    }

    /**
     * @param augmentedManifests
     *        A list of augmented manifest files that provide training data for your custom model. An augmented
     *        manifest file is a labeled dataset that is produced by Amazon SageMaker Ground Truth.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetInputDataConfig withAugmentedManifests(java.util.Collection<DatasetAugmentedManifestsListItem> augmentedManifests) {
        setAugmentedManifests(augmentedManifests);
        return this;
    }

    /**
     * @param dataFormat
     *        COMPREHEND_CSV: The data format is a two-column CSV file, where the first column contains labels and the
     *        second column contains documents.
     *
     *        AUGMENTED_MANIFEST: The data format
     * @see DatasetDataFormat
     */
    public void setDataFormat(String dataFormat) {
        this.dataFormat = dataFormat;
    }

    /**
     * @return COMPREHEND_CSV: The data format is a two-column CSV file, where the first column contains labels and the
     *         second column contains documents.
     *
     *         AUGMENTED_MANIFEST: The data format
     * @see DatasetDataFormat
     */
    public String getDataFormat() {
        return this.dataFormat;
    }

    /**
     * @param dataFormat
     *        COMPREHEND_CSV: The data format is a two-column CSV file, where the first column contains labels and the
     *        second column contains documents.
     *
     *        AUGMENTED_MANIFEST: The data format
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetDataFormat
     */
    public DatasetInputDataConfig withDataFormat(String dataFormat) {
        setDataFormat(dataFormat);
        return this;
    }

    /**
     * @param dataFormat
     *        COMPREHEND_CSV: The data format is a two-column CSV file, where the first column contains labels and the
     *        second column contains documents.
     *
     *        AUGMENTED_MANIFEST: The data format
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DatasetDataFormat
     */
    public DatasetInputDataConfig withDataFormat(DatasetDataFormat dataFormat) {
        this.dataFormat = dataFormat.toString();
        return this;
    }

    /**
     * @param documentClassifierInputDataConfig
     *        The input properties for training a document classifier model.
     *
     *        For more information on how the input file is formatted, see Preparing training data in the Comprehend
     *        Developer Guide.
     */
    public void setDocumentClassifierInputDataConfig(DatasetDocumentClassifierInputDataConfig documentClassifierInputDataConfig) {
        this.documentClassifierInputDataConfig = documentClassifierInputDataConfig;
    }

    /**
     * @return The input properties for training a document classifier model.
     *
     *         For more information on how the input file is formatted, see Preparing training data in the Comprehend
     *         Developer Guide.
     */
    public DatasetDocumentClassifierInputDataConfig getDocumentClassifierInputDataConfig() {
        return this.documentClassifierInputDataConfig;
    }

    /**
     * @param documentClassifierInputDataConfig
     *        The input properties for training a document classifier model.
     *
     *        For more information on how the input file is formatted, see Preparing training data in the Comprehend
     *        Developer Guide.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetInputDataConfig withDocumentClassifierInputDataConfig(DatasetDocumentClassifierInputDataConfig documentClassifierInputDataConfig) {
        setDocumentClassifierInputDataConfig(documentClassifierInputDataConfig);
        return this;
    }

    /**
     * @param entityRecognizerInputDataConfig
     *        The input properties for training an entity recognizer model.
     */
    public void setEntityRecognizerInputDataConfig(DatasetEntityRecognizerInputDataConfig entityRecognizerInputDataConfig) {
        this.entityRecognizerInputDataConfig = entityRecognizerInputDataConfig;
    }

    /**
     * @return The input properties for training an entity recognizer model.
     */
    public DatasetEntityRecognizerInputDataConfig getEntityRecognizerInputDataConfig() {
        return this.entityRecognizerInputDataConfig;
    }

    /**
     * @param entityRecognizerInputDataConfig
     *        The input properties for training an entity recognizer model.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DatasetInputDataConfig withEntityRecognizerInputDataConfig(DatasetEntityRecognizerInputDataConfig entityRecognizerInputDataConfig) {
        setEntityRecognizerInputDataConfig(entityRecognizerInputDataConfig);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAugmentedManifests() != null)
            sb.append("AugmentedManifests: ").append(getAugmentedManifests()).append(",");
        if (getDataFormat() != null)
            sb.append("DataFormat: ").append(getDataFormat()).append(",");
        if (getDocumentClassifierInputDataConfig() != null)
            sb.append("DocumentClassifierInputDataConfig: ").append(getDocumentClassifierInputDataConfig()).append(",");
        if (getEntityRecognizerInputDataConfig() != null)
            sb.append("EntityRecognizerInputDataConfig: ").append(getEntityRecognizerInputDataConfig());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof DatasetInputDataConfig == false)
            return false;
        DatasetInputDataConfig other = (DatasetInputDataConfig) obj;
        if (other.getAugmentedManifests() == null ^ this.getAugmentedManifests() == null)
            return false;
        if (other.getAugmentedManifests() != null && other.getAugmentedManifests().equals(this.getAugmentedManifests()) == false)
            return false;
        if (other.getDataFormat() == null ^ this.getDataFormat() == null)
            return false;
        if (other.getDataFormat() != null && other.getDataFormat().equals(this.getDataFormat()) == false)
            return false;
        if (other.getDocumentClassifierInputDataConfig() == null ^ this.getDocumentClassifierInputDataConfig() == null)
            return false;
        if (other.getDocumentClassifierInputDataConfig() != null
                && other.getDocumentClassifierInputDataConfig().equals(this.getDocumentClassifierInputDataConfig()) == false)
            return false;
        if (other.getEntityRecognizerInputDataConfig() == null ^ this.getEntityRecognizerInputDataConfig() == null)
            return false;
        if (other.getEntityRecognizerInputDataConfig() != null
                && other.getEntityRecognizerInputDataConfig().equals(this.getEntityRecognizerInputDataConfig()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getAugmentedManifests() == null) ? 0 : getAugmentedManifests().hashCode());
        hashCode = prime * hashCode + ((getDataFormat() == null) ? 0 : getDataFormat().hashCode());
        hashCode = prime * hashCode + ((getDocumentClassifierInputDataConfig() == null) ? 0 : getDocumentClassifierInputDataConfig().hashCode());
        hashCode = prime * hashCode + ((getEntityRecognizerInputDataConfig() == null) ? 0 : getEntityRecognizerInputDataConfig().hashCode());
        return hashCode;
    }

    @Override
    public DatasetInputDataConfig clone() {
        try {
            return (DatasetInputDataConfig) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.comprehend.model.transform.DatasetInputDataConfigMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
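As a usage note, here is a minimal sketch of populating this class through the fluent methods defined above, one variant per documented data format. The withS3Uri and withAttributeNames calls on the sibling model classes (DatasetDocumentClassifierInputDataConfig, DatasetAugmentedManifestsListItem) are assumptions based on their documented fields rather than code shown in this listing, and the S3 URIs are placeholders.

import com.amazonaws.services.comprehend.model.DatasetAugmentedManifestsListItem;
import com.amazonaws.services.comprehend.model.DatasetDataFormat;
import com.amazonaws.services.comprehend.model.DatasetDocumentClassifierInputDataConfig;
import com.amazonaws.services.comprehend.model.DatasetInputDataConfig;

final class DatasetInputDataConfigSamples {

    /** COMPREHEND_CSV: a two-column CSV (labels, documents) described by the document classifier input config. */
    static DatasetInputDataConfig csvConfig() {
        return new DatasetInputDataConfig()
                .withDataFormat(DatasetDataFormat.COMPREHEND_CSV)
                .withDocumentClassifierInputDataConfig(new DatasetDocumentClassifierInputDataConfig()
                        .withS3Uri("s3://example-bucket/training/docs.csv")); // assumed setter, placeholder URI
    }

    /** AUGMENTED_MANIFEST: one or more SageMaker Ground Truth augmented manifest files. */
    static DatasetInputDataConfig augmentedManifestConfig() {
        return new DatasetInputDataConfig()
                .withDataFormat(DatasetDataFormat.AUGMENTED_MANIFEST)
                .withAugmentedManifests(new DatasetAugmentedManifestsListItem()
                        .withS3Uri("s3://example-bucket/manifests/output.manifest") // assumed setter, placeholder URI
                        .withAttributeNames("label")); // assumed setter, placeholder attribute name
    }
}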



