
com.amazonaws.services.comprehend.model.ClassifyDocumentRequest Maven / Gradle / Ivy

The AWS Java SDK for Amazon Comprehend module holds the client classes that are used for communicating with Amazon Comprehend Service

/*
 * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.comprehend.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ClassifyDocumentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     * parameter.
     */
    private String text;
    /**
     * The Amazon Resource Number (ARN) of the endpoint.
     * <p>
     * For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     * <p>
     * For custom classification, you create an endpoint for your custom model. For more information, see Using
     * Amazon Comprehend endpoints.
     */
    private String endpointArn;
    /**
     * Use the Bytes parameter to input a text, PDF, Word or image file.
     * <p>
     * When you classify a document using a custom model, you can also use the Bytes parameter to input an Amazon
     * Textract DetectDocumentText or AnalyzeDocument output file.
     * <p>
     * To classify a document using the prompt classifier, use the Text parameter for input.
     * <p>
     * Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web Services SDK
     * to classify documents, the SDK may encode the document file bytes for you.
     * <p>
     * The maximum length of this field depends on the input document type. For details, see Inputs for real-time
     * custom analysis in the Comprehend Developer Guide.
     * <p>
     * If you use the Bytes parameter, do not use the Text parameter.
     */
    private java.nio.ByteBuffer bytes;
    /**
     * Provides configuration parameters to override the default actions for extracting text from PDF documents and
     * image files.
     */
    private DocumentReaderConfig documentReaderConfig;

    /**
     * The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     * parameter.
     * 
     * @param text
     *        The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     *        parameter.
     */
    public void setText(String text) {
        this.text = text;
    }

    /**
     * The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     * parameter.
     * 
     * @return The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     *         parameter.
     */
    public String getText() {
        return this.text;
    }

    /**
     * The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     * parameter.
     * 
     * @param text
     *        The document text to be analyzed. If you enter text using this parameter, do not use the Bytes
     *        parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ClassifyDocumentRequest withText(String text) {
        setText(text);
        return this;
    }

    /**
     * The Amazon Resource Number (ARN) of the endpoint.
     * <p>
     * For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     * <p>
     * For custom classification, you create an endpoint for your custom model. For more information, see Using
     * Amazon Comprehend endpoints.
     * 
     * @param endpointArn
     *        The Amazon Resource Number (ARN) of the endpoint.
     *        <p>
     *        For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     *        <p>
     *        For custom classification, you create an endpoint for your custom model. For more information, see
     *        Using Amazon Comprehend endpoints.
     */
    public void setEndpointArn(String endpointArn) {
        this.endpointArn = endpointArn;
    }

    /**
     * The Amazon Resource Number (ARN) of the endpoint.
     * <p>
     * For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     * <p>
     * For custom classification, you create an endpoint for your custom model. For more information, see Using
     * Amazon Comprehend endpoints.
     * 
     * @return The Amazon Resource Number (ARN) of the endpoint.
     *         <p>
     *         For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     *         <p>
     *         For custom classification, you create an endpoint for your custom model. For more information, see
     *         Using Amazon Comprehend endpoints.
     */
    public String getEndpointArn() {
        return this.endpointArn;
    }

    /**
     * The Amazon Resource Number (ARN) of the endpoint.
     * <p>
     * For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     * <p>
     * For custom classification, you create an endpoint for your custom model. For more information, see Using
     * Amazon Comprehend endpoints.
     * 
     * @param endpointArn
     *        The Amazon Resource Number (ARN) of the endpoint.
     *        <p>
     *        For prompt classification, Amazon Comprehend provides the endpoint ARN: zzz.
     *        <p>
     *        For custom classification, you create an endpoint for your custom model. For more information, see
     *        Using Amazon Comprehend endpoints.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ClassifyDocumentRequest withEndpointArn(String endpointArn) {
        setEndpointArn(endpointArn);
        return this;
    }

    /**
     * Use the Bytes parameter to input a text, PDF, Word or image file.
     * <p>
     * When you classify a document using a custom model, you can also use the Bytes parameter to input an Amazon
     * Textract DetectDocumentText or AnalyzeDocument output file.
     * <p>
     * To classify a document using the prompt classifier, use the Text parameter for input.
     * <p>
     * Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web Services SDK
     * to classify documents, the SDK may encode the document file bytes for you.
     * <p>
     * The maximum length of this field depends on the input document type. For details, see Inputs for real-time
     * custom analysis in the Comprehend Developer Guide.
     * <p>
     * If you use the Bytes parameter, do not use the Text parameter.
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer
     * will be seen by all objects that have a reference to this object. It is recommended to call
     * ByteBuffer.duplicate() or ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior
     * will be changed in a future major version of the SDK.
     * 
     * @param bytes
     *        Use the Bytes parameter to input a text, PDF, Word or image file.
     *        <p>
     *        When you classify a document using a custom model, you can also use the Bytes parameter to input an
     *        Amazon Textract DetectDocumentText or AnalyzeDocument output file.
     *        <p>
     *        To classify a document using the prompt classifier, use the Text parameter for input.
     *        <p>
     *        Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web
     *        Services SDK to classify documents, the SDK may encode the document file bytes for you.
     *        <p>
     *        The maximum length of this field depends on the input document type. For details, see Inputs for
     *        real-time custom analysis in the Comprehend Developer Guide.
     *        <p>
     *        If you use the Bytes parameter, do not use the Text parameter.
     */
    public void setBytes(java.nio.ByteBuffer bytes) {
        this.bytes = bytes;
    }

    /**
     * Use the Bytes parameter to input a text, PDF, Word or image file.
     * <p>
     * When you classify a document using a custom model, you can also use the Bytes parameter to input an Amazon
     * Textract DetectDocumentText or AnalyzeDocument output file.
     * <p>
     * To classify a document using the prompt classifier, use the Text parameter for input.
     * <p>
     * Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web Services SDK
     * to classify documents, the SDK may encode the document file bytes for you.
     * <p>
     * The maximum length of this field depends on the input document type. For details, see Inputs for real-time
     * custom analysis in the Comprehend Developer Guide.
     * <p>
     * If you use the Bytes parameter, do not use the Text parameter.
     * <p>
     * {@code ByteBuffer}s are stateful. Calling their {@code get} methods changes their {@code position}. We
     * recommend using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view of the buffer with
     * an independent {@code position}, and calling {@code get} methods on this rather than directly on the returned
     * {@code ByteBuffer}. Doing so will ensure that anyone else using the {@code ByteBuffer} will not be affected by
     * changes to the {@code position}.
     * 
     * @return Use the Bytes parameter to input a text, PDF, Word or image file.
     *         <p>
     *         When you classify a document using a custom model, you can also use the Bytes parameter to input an
     *         Amazon Textract DetectDocumentText or AnalyzeDocument output file.
     *         <p>
     *         To classify a document using the prompt classifier, use the Text parameter for input.
     *         <p>
     *         Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web
     *         Services SDK to classify documents, the SDK may encode the document file bytes for you.
     *         <p>
     *         The maximum length of this field depends on the input document type. For details, see Inputs for
     *         real-time custom analysis in the Comprehend Developer Guide.
     *         <p>
     *         If you use the Bytes parameter, do not use the Text parameter.
     */
    public java.nio.ByteBuffer getBytes() {
        return this.bytes;
    }

    /**
     * Use the Bytes parameter to input a text, PDF, Word or image file.
     * <p>
     * When you classify a document using a custom model, you can also use the Bytes parameter to input an Amazon
     * Textract DetectDocumentText or AnalyzeDocument output file.
     * <p>
     * To classify a document using the prompt classifier, use the Text parameter for input.
     * <p>
     * Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web Services SDK
     * to classify documents, the SDK may encode the document file bytes for you.
     * <p>
     * The maximum length of this field depends on the input document type. For details, see Inputs for real-time
     * custom analysis in the Comprehend Developer Guide.
     * <p>
     * If you use the Bytes parameter, do not use the Text parameter.
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer
     * will be seen by all objects that have a reference to this object. It is recommended to call
     * ByteBuffer.duplicate() or ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior
     * will be changed in a future major version of the SDK.
     * 
     * @param bytes
     *        Use the Bytes parameter to input a text, PDF, Word or image file.
     *        <p>
     *        When you classify a document using a custom model, you can also use the Bytes parameter to input an
     *        Amazon Textract DetectDocumentText or AnalyzeDocument output file.
     *        <p>
     *        To classify a document using the prompt classifier, use the Text parameter for input.
     *        <p>
     *        Provide the input document as a sequence of base64-encoded bytes. If your code uses an Amazon Web
     *        Services SDK to classify documents, the SDK may encode the document file bytes for you.
     *        <p>
     *        The maximum length of this field depends on the input document type. For details, see Inputs for
     *        real-time custom analysis in the Comprehend Developer Guide.
     *        <p>
     *        If you use the Bytes parameter, do not use the Text parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ClassifyDocumentRequest withBytes(java.nio.ByteBuffer bytes) {
        setBytes(bytes);
        return this;
    }

    /**
     * Provides configuration parameters to override the default actions for extracting text from PDF documents and
     * image files.
     * 
     * @param documentReaderConfig
     *        Provides configuration parameters to override the default actions for extracting text from PDF
     *        documents and image files.
     */
    public void setDocumentReaderConfig(DocumentReaderConfig documentReaderConfig) {
        this.documentReaderConfig = documentReaderConfig;
    }

    /**
     * Provides configuration parameters to override the default actions for extracting text from PDF documents and
     * image files.
     * 
     * @return Provides configuration parameters to override the default actions for extracting text from PDF
     *         documents and image files.
     */
    public DocumentReaderConfig getDocumentReaderConfig() {
        return this.documentReaderConfig;
    }

    /**
     * Provides configuration parameters to override the default actions for extracting text from PDF documents and
     * image files.
     * 
     * @param documentReaderConfig
     *        Provides configuration parameters to override the default actions for extracting text from PDF
     *        documents and image files.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ClassifyDocumentRequest withDocumentReaderConfig(DocumentReaderConfig documentReaderConfig) {
        setDocumentReaderConfig(documentReaderConfig);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will
     * be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getText() != null)
            sb.append("Text: ").append("***Sensitive Data Redacted***").append(",");
        if (getEndpointArn() != null)
            sb.append("EndpointArn: ").append(getEndpointArn()).append(",");
        if (getBytes() != null)
            sb.append("Bytes: ").append(getBytes()).append(",");
        if (getDocumentReaderConfig() != null)
            sb.append("DocumentReaderConfig: ").append(getDocumentReaderConfig());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ClassifyDocumentRequest == false)
            return false;
        ClassifyDocumentRequest other = (ClassifyDocumentRequest) obj;
        if (other.getText() == null ^ this.getText() == null)
            return false;
        if (other.getText() != null && other.getText().equals(this.getText()) == false)
            return false;
        if (other.getEndpointArn() == null ^ this.getEndpointArn() == null)
            return false;
        if (other.getEndpointArn() != null && other.getEndpointArn().equals(this.getEndpointArn()) == false)
            return false;
        if (other.getBytes() == null ^ this.getBytes() == null)
            return false;
        if (other.getBytes() != null && other.getBytes().equals(this.getBytes()) == false)
            return false;
        if (other.getDocumentReaderConfig() == null ^ this.getDocumentReaderConfig() == null)
            return false;
        if (other.getDocumentReaderConfig() != null && other.getDocumentReaderConfig().equals(this.getDocumentReaderConfig()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getText() == null) ? 0 : getText().hashCode());
        hashCode = prime * hashCode + ((getEndpointArn() == null) ? 0 : getEndpointArn().hashCode());
        hashCode = prime * hashCode + ((getBytes() == null) ? 0 : getBytes().hashCode());
        hashCode = prime * hashCode + ((getDocumentReaderConfig() == null) ? 0 : getDocumentReaderConfig().hashCode());
        return hashCode;
    }

    @Override
    public ClassifyDocumentRequest clone() {
        return (ClassifyDocumentRequest) super.clone();
    }

}
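Usage note: the class docs above describe two mutually exclusive input modes (Text for plain text, Bytes for file content). The sketch below is a minimal illustration of both against the AWS SDK for Java v1 Comprehend client; the endpoint ARN, region, input text, and file path are placeholder values you would replace with your own, not values taken from this source file.

    import java.nio.ByteBuffer;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import com.amazonaws.services.comprehend.AmazonComprehend;
    import com.amazonaws.services.comprehend.AmazonComprehendClientBuilder;
    import com.amazonaws.services.comprehend.model.ClassifyDocumentRequest;
    import com.amazonaws.services.comprehend.model.ClassifyDocumentResult;
    import com.amazonaws.services.comprehend.model.DocumentClass;

    public class ClassifyDocumentExample {
        public static void main(String[] args) throws Exception {
            // Credentials and region come from the standard provider chain.
            AmazonComprehend comprehend = AmazonComprehendClientBuilder.defaultClient();

            // Placeholder ARN for a classifier endpoint you have created.
            String endpointArn = "arn:aws:comprehend:us-east-1:123456789012:document-classifier-endpoint/example";

            // Text input: set Text and EndpointArn, leave Bytes unset.
            ClassifyDocumentRequest textRequest = new ClassifyDocumentRequest()
                    .withText("Example document text to classify.")
                    .withEndpointArn(endpointArn);
            ClassifyDocumentResult result = comprehend.classifyDocument(textRequest);
            for (DocumentClass documentClass : result.getClasses()) {
                System.out.println(documentClass.getName() + ": " + documentClass.getScore());
            }

            // Bytes input: pass the raw file contents; the SDK performs the Base64 encoding,
            // so do not encode the bytes yourself and do not set Text on the same request.
            ByteBuffer fileBytes = ByteBuffer.wrap(Files.readAllBytes(Paths.get("document.pdf")));
            ClassifyDocumentRequest bytesRequest = new ClassifyDocumentRequest()
                    .withBytes(fileBytes)
                    .withEndpointArn(endpointArn);
            comprehend.classifyDocument(bytesRequest);
        }
    }

For a multi-label custom classifier, the predictions are returned through the result's getLabels() list rather than getClasses().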



