/*
* Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.forecast.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
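* Represents the response from a <code>DescribeDatasetImportJob</code> call.
*
* <p>
* The snippet below is a minimal usage sketch, not part of the generated class; the client construction and the ARN
* are placeholders.
* </p>
* <pre>{@code
* AmazonForecast forecast = AmazonForecastClientBuilder.defaultClient();
* DescribeDatasetImportJobResult result = forecast.describeDatasetImportJob(
*         new DescribeDatasetImportJobRequest()
*                 .withDatasetImportJobArn("arn:aws:forecast:us-west-2:111122223333:dataset-import-job/example"));
* System.out.println(result.getStatus() + ", " + result.getDataSize() + " GB imported");
* }</pre>
*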
* @see AWS API Documentation
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeDatasetImportJobResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
*
* The name of the dataset import job.
*
*/
private String datasetImportJobName;
/**
*
* The ARN of the dataset import job.
*
*/
private String datasetImportJobArn;
/**
*
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*
*/
private String datasetArn;
/**
*
* The format of timestamps in the dataset. The format that you specify depends on the <code>DataFrequency</code>
* specified when the dataset was created. The following formats are supported:
* <ul>
* <li><code>"yyyy-MM-dd"</code>, for the following data frequencies: Y, M, W, and D</li>
* <li><code>"yyyy-MM-dd HH:mm:ss"</code>, for the following data frequencies: H, 30min, 15min, and 1min; and
* optionally, for: Y, M, W, and D</li>
* </ul>
*
*/
private String timestampFormat;
/**
*
* The location of the training data to import and an AWS Identity and Access Management (IAM) role that Amazon
* Forecast can assume to access the data.
*
* If encryption is used, <code>DataSource</code> includes an AWS Key Management Service (KMS) key.
*
*/
private DataSource dataSource;
/**
*
* Statistical information about each field in the input data.
*
*/
private java.util.Map<String, Statistics> fieldStatistics;
/**
*
* The size of the dataset in gigabytes (GB) after the import job has finished.
*
*/
private Double dataSize;
/**
*
* The status of the dataset import job. The status is reflected in the status of the dataset. For example, when
* the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
* <code>UPDATE_IN_PROGRESS</code>. States include:
* <ul>
* <li><code>ACTIVE</code></li>
* <li><code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code></li>
* <li><code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code></li>
* </ul>
*
*/
private String status;
/**
*
* If an error occurred, an informational message about the error.
*
*/
private String message;
/**
*
* When the dataset import job was created.
*
*/
private java.util.Date creationTime;
/**
*
* The last time that the dataset was modified. The time depends on the status of the job, as follows:
* <ul>
* <li><code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>.</li>
* <li><code>CREATE_IN_PROGRESS</code> - the current timestamp.</li>
* <li><code>ACTIVE</code> or <code>CREATE_FAILED</code> - when the job finished or failed.</li>
* </ul>
*
*/
private java.util.Date lastModificationTime;
/**
*
* The name of the dataset import job.
*
*
* @param datasetImportJobName
* The name of the dataset import job.
*/
public void setDatasetImportJobName(String datasetImportJobName) {
this.datasetImportJobName = datasetImportJobName;
}
/**
*
* The name of the dataset import job.
*
*
* @return The name of the dataset import job.
*/
public String getDatasetImportJobName() {
return this.datasetImportJobName;
}
/**
*
* The name of the dataset import job.
*
*
* @param datasetImportJobName
* The name of the dataset import job.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withDatasetImportJobName(String datasetImportJobName) {
setDatasetImportJobName(datasetImportJobName);
return this;
}
/**
*
* The ARN of the dataset import job.
*
*
* @param datasetImportJobArn
* The ARN of the dataset import job.
*/
public void setDatasetImportJobArn(String datasetImportJobArn) {
this.datasetImportJobArn = datasetImportJobArn;
}
/**
*
* The ARN of the dataset import job.
*
*
* @return The ARN of the dataset import job.
*/
public String getDatasetImportJobArn() {
return this.datasetImportJobArn;
}
/**
*
* The ARN of the dataset import job.
*
*
* @param datasetImportJobArn
* The ARN of the dataset import job.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withDatasetImportJobArn(String datasetImportJobArn) {
setDatasetImportJobArn(datasetImportJobArn);
return this;
}
/**
*
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*
*
* @param datasetArn
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*/
public void setDatasetArn(String datasetArn) {
this.datasetArn = datasetArn;
}
/**
*
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*
*
* @return The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*/
public String getDatasetArn() {
return this.datasetArn;
}
/**
*
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
*
*
* @param datasetArn
* The Amazon Resource Name (ARN) of the dataset that the training data was imported to.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withDatasetArn(String datasetArn) {
setDatasetArn(datasetArn);
return this;
}
/**
*
* The format of timestamps in the dataset. The format that you specify depends on the <code>DataFrequency</code>
* specified when the dataset was created. The following formats are supported:
* <ul>
* <li><code>"yyyy-MM-dd"</code>, for the following data frequencies: Y, M, W, and D</li>
* <li><code>"yyyy-MM-dd HH:mm:ss"</code>, for the following data frequencies: H, 30min, 15min, and 1min; and
* optionally, for: Y, M, W, and D</li>
* </ul>
*
* @param timestampFormat
*        The format of timestamps in the dataset. The format that you specify depends on the
*        <code>DataFrequency</code> specified when the dataset was created. The supported formats are
*        <code>"yyyy-MM-dd"</code> (for the data frequencies Y, M, W, and D) and
*        <code>"yyyy-MM-dd HH:mm:ss"</code> (for the data frequencies H, 30min, 15min, and 1min; and optionally,
*        for Y, M, W, and D).
*/
public void setTimestampFormat(String timestampFormat) {
this.timestampFormat = timestampFormat;
}
/**
*
* The format of timestamps in the dataset. The format that you specify depends on the <code>DataFrequency</code>
* specified when the dataset was created. The following formats are supported:
* <ul>
* <li><code>"yyyy-MM-dd"</code>, for the following data frequencies: Y, M, W, and D</li>
* <li><code>"yyyy-MM-dd HH:mm:ss"</code>, for the following data frequencies: H, 30min, 15min, and 1min; and
* optionally, for: Y, M, W, and D</li>
* </ul>
*
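* <p>
* A hypothetical sketch (not generated code) of parsing a timestamp once this format is known; {@code result} and
* the sample values are placeholders, and the java.time types are used for illustration only:
* </p>
* <pre>{@code
* String format = result.getTimestampFormat(); // "yyyy-MM-dd" or "yyyy-MM-dd HH:mm:ss"
* java.time.format.DateTimeFormatter fmt = java.time.format.DateTimeFormatter.ofPattern(format);
* if ("yyyy-MM-dd".equals(format)) {
*     java.time.LocalDate day = java.time.LocalDate.parse("2019-12-31", fmt);
* } else {
*     java.time.LocalDateTime ts = java.time.LocalDateTime.parse("2019-12-31 10:00:00", fmt);
* }
* }</pre>
*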
* @return The format of timestamps in the dataset. The format that you specify depends on the
*         <code>DataFrequency</code> specified when the dataset was created. The supported formats are
*         <code>"yyyy-MM-dd"</code> (for the data frequencies Y, M, W, and D) and
*         <code>"yyyy-MM-dd HH:mm:ss"</code> (for the data frequencies H, 30min, 15min, and 1min; and optionally,
*         for Y, M, W, and D).
*/
public String getTimestampFormat() {
return this.timestampFormat;
}
/**
*
* The format of timestamps in the dataset. The format that you specify depends on the <code>DataFrequency</code>
* specified when the dataset was created. The following formats are supported:
* <ul>
* <li><code>"yyyy-MM-dd"</code>, for the following data frequencies: Y, M, W, and D</li>
* <li><code>"yyyy-MM-dd HH:mm:ss"</code>, for the following data frequencies: H, 30min, 15min, and 1min; and
* optionally, for: Y, M, W, and D</li>
* </ul>
*
* @param timestampFormat
*        The format of timestamps in the dataset. The format that you specify depends on the
*        <code>DataFrequency</code> specified when the dataset was created. The supported formats are
*        <code>"yyyy-MM-dd"</code> (for the data frequencies Y, M, W, and D) and
*        <code>"yyyy-MM-dd HH:mm:ss"</code> (for the data frequencies H, 30min, 15min, and 1min; and optionally,
*        for Y, M, W, and D).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withTimestampFormat(String timestampFormat) {
setTimestampFormat(timestampFormat);
return this;
}
/**
*
* The location of the training data to import and an AWS Identity and Access Management (IAM) role that Amazon
* Forecast can assume to access the data.
*
* If encryption is used, <code>DataSource</code> includes an AWS Key Management Service (KMS) key.
*
* @param dataSource
*        The location of the training data to import and an AWS Identity and Access Management (IAM) role that
*        Amazon Forecast can assume to access the data. If encryption is used, <code>DataSource</code> includes
*        an AWS Key Management Service (KMS) key.
*/
public void setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
}
/**
*
* The location of the training data to import and an AWS Identity and Access Management (IAM) role that Amazon
* Forecast can assume to access the data.
*
* If encryption is used, <code>DataSource</code> includes an AWS Key Management Service (KMS) key.
*
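* <p>
* A hypothetical sketch (not generated code) of reading the S3 location; {@code result} is assumed to be a
* populated instance of this class, and the <code>S3Config</code> getters used below are assumptions about the
* Forecast model:
* </p>
* <pre>{@code
* DataSource source = result.getDataSource();
* if (source != null && source.getS3Config() != null) {
*     System.out.println(source.getS3Config().getPath());
* }
* }</pre>
*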
* @return The location of the training data to import and an AWS Identity and Access Management (IAM) role that
*         Amazon Forecast can assume to access the data. If encryption is used, <code>DataSource</code> includes
*         an AWS Key Management Service (KMS) key.
*/
public DataSource getDataSource() {
return this.dataSource;
}
/**
*
* The location of the training data to import and an AWS Identity and Access Management (IAM) role that Amazon
* Forecast can assume to access the data.
*
* If encryption is used, <code>DataSource</code> includes an AWS Key Management Service (KMS) key.
*
* @param dataSource
*        The location of the training data to import and an AWS Identity and Access Management (IAM) role that
*        Amazon Forecast can assume to access the data. If encryption is used, <code>DataSource</code> includes
*        an AWS Key Management Service (KMS) key.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withDataSource(DataSource dataSource) {
setDataSource(dataSource);
return this;
}
/**
*
* Statistical information about each field in the input data.
*
*
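* <p>
* A hypothetical sketch (not generated code) of iterating the per-field statistics; {@code result} is assumed to
* be a populated instance of this class:
* </p>
* <pre>{@code
* java.util.Map<String, Statistics> stats = result.getFieldStatistics();
* if (stats != null) {
*     stats.forEach((field, s) -> System.out.println(field + ": " + s));
* }
* }</pre>
*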
* @return Statistical information about each field in the input data.
*/
public java.util.Map<String, Statistics> getFieldStatistics() {
return fieldStatistics;
}
/**
*
* Statistical information about each field in the input data.
*
*
* @param fieldStatistics
* Statistical information about each field in the input data.
*/
public void setFieldStatistics(java.util.Map<String, Statistics> fieldStatistics) {
this.fieldStatistics = fieldStatistics;
}
/**
*
* Statistical information about each field in the input data.
*
*
* @param fieldStatistics
* Statistical information about each field in the input data.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withFieldStatistics(java.util.Map<String, Statistics> fieldStatistics) {
setFieldStatistics(fieldStatistics);
return this;
}
/**
* Adds a single entry to the FieldStatistics map.
*
* @param key The field name.
* @param value The statistics for the field.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult addFieldStatisticsEntry(String key, Statistics value) {
if (null == this.fieldStatistics) {
this.fieldStatistics = new java.util.HashMap<String, Statistics>();
}
if (this.fieldStatistics.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.fieldStatistics.put(key, value);
return this;
}
/**
* Removes all the entries added into FieldStatistics.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult clearFieldStatisticsEntries() {
this.fieldStatistics = null;
return this;
}
/**
*
* The size of the dataset in gigabytes (GB) after the import job has finished.
*
*
* @param dataSize
* The size of the dataset in gigabytes (GB) after the import job has finished.
*/
public void setDataSize(Double dataSize) {
this.dataSize = dataSize;
}
/**
*
* The size of the dataset in gigabytes (GB) after the import job has finished.
*
*
* @return The size of the dataset in gigabytes (GB) after the import job has finished.
*/
public Double getDataSize() {
return this.dataSize;
}
/**
*
* The size of the dataset in gigabytes (GB) after the import job has finished.
*
*
* @param dataSize
* The size of the dataset in gigabytes (GB) after the import job has finished.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withDataSize(Double dataSize) {
setDataSize(dataSize);
return this;
}
/**
*
* The status of the dataset import job. The status is reflected in the status of the dataset. For example, when
* the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
* <code>UPDATE_IN_PROGRESS</code>. States include:
* <ul>
* <li><code>ACTIVE</code></li>
* <li><code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code></li>
* <li><code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code></li>
* </ul>
*
* @param status
*        The status of the dataset import job. The status is reflected in the status of the dataset. For example,
*        when the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
*        <code>UPDATE_IN_PROGRESS</code>. States include <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
*        <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; and <code>DELETE_PENDING</code>,
*        <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code>.
*/
public void setStatus(String status) {
this.status = status;
}
/**
*
* The status of the dataset import job. The status is reflected in the status of the dataset. For example, when
* the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
* <code>UPDATE_IN_PROGRESS</code>. States include:
* <ul>
* <li><code>ACTIVE</code></li>
* <li><code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code></li>
* <li><code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code></li>
* </ul>
*
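* <p>
* A hypothetical polling sketch (not generated code); {@code forecast} and {@code request} are assumed to be an
* {@code AmazonForecast} client and a {@code DescribeDatasetImportJobRequest}, and InterruptedException handling
* is omitted:
* </p>
* <pre>{@code
* String status;
* do {
*     Thread.sleep(30_000); // wait 30 seconds between polls
*     status = forecast.describeDatasetImportJob(request).getStatus();
* } while (!"ACTIVE".equals(status) && !"CREATE_FAILED".equals(status));
* }</pre>
*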
* @return The status of the dataset import job. The status is reflected in the status of the dataset. For
*         example, when the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
*         <code>UPDATE_IN_PROGRESS</code>. States include <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
*         <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; and <code>DELETE_PENDING</code>,
*         <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code>.
*/
public String getStatus() {
return this.status;
}
/**
*
* The status of the dataset import job. The status is reflected in the status of the dataset. For example, when
* the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
* <code>UPDATE_IN_PROGRESS</code>. States include:
* <ul>
* <li><code>ACTIVE</code></li>
* <li><code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code></li>
* <li><code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code></li>
* </ul>
*
* @param status
*        The status of the dataset import job. The status is reflected in the status of the dataset. For example,
*        when the import job status is <code>CREATE_IN_PROGRESS</code>, the status of the dataset is
*        <code>UPDATE_IN_PROGRESS</code>. States include <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
*        <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; and <code>DELETE_PENDING</code>,
*        <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withStatus(String status) {
setStatus(status);
return this;
}
/**
*
* If an error occurred, an informational message about the error.
*
*
* @param message
* If an error occurred, an informational message about the error.
*/
public void setMessage(String message) {
this.message = message;
}
/**
*
* If an error occurred, an informational message about the error.
*
*
* @return If an error occurred, an informational message about the error.
*/
public String getMessage() {
return this.message;
}
/**
*
* If an error occurred, an informational message about the error.
*
*
* @param message
* If an error occurred, an informational message about the error.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withMessage(String message) {
setMessage(message);
return this;
}
/**
*
* When the dataset import job was created.
*
*
* @param creationTime
* When the dataset import job was created.
*/
public void setCreationTime(java.util.Date creationTime) {
this.creationTime = creationTime;
}
/**
*
* When the dataset import job was created.
*
*
* @return When the dataset import job was created.
*/
public java.util.Date getCreationTime() {
return this.creationTime;
}
/**
*
* When the dataset import job was created.
*
*
* @param creationTime
* When the dataset import job was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withCreationTime(java.util.Date creationTime) {
setCreationTime(creationTime);
return this;
}
/**
*
* The last time that the dataset was modified. The time depends on the status of the job, as follows:
* <ul>
* <li><code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>.</li>
* <li><code>CREATE_IN_PROGRESS</code> - the current timestamp.</li>
* <li><code>ACTIVE</code> or <code>CREATE_FAILED</code> - when the job finished or failed.</li>
* </ul>
*
* @param lastModificationTime
*        The last time that the dataset was modified. The time depends on the status of the job, as follows:
*        <code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>;
*        <code>CREATE_IN_PROGRESS</code> - the current timestamp; <code>ACTIVE</code> or
*        <code>CREATE_FAILED</code> - when the job finished or failed.
*/
public void setLastModificationTime(java.util.Date lastModificationTime) {
this.lastModificationTime = lastModificationTime;
}
/**
*
* The last time that the dataset was modified. The time depends on the status of the job, as follows:
* <ul>
* <li><code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>.</li>
* <li><code>CREATE_IN_PROGRESS</code> - the current timestamp.</li>
* <li><code>ACTIVE</code> or <code>CREATE_FAILED</code> - when the job finished or failed.</li>
* </ul>
*
* @return The last time that the dataset was modified. The time depends on the status of the job, as follows:
*         <code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>;
*         <code>CREATE_IN_PROGRESS</code> - the current timestamp; <code>ACTIVE</code> or
*         <code>CREATE_FAILED</code> - when the job finished or failed.
*/
public java.util.Date getLastModificationTime() {
return this.lastModificationTime;
}
/**
*
* The last time that the dataset was modified. The time depends on the status of the job, as follows:
* <ul>
* <li><code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>.</li>
* <li><code>CREATE_IN_PROGRESS</code> - the current timestamp.</li>
* <li><code>ACTIVE</code> or <code>CREATE_FAILED</code> - when the job finished or failed.</li>
* </ul>
*
* @param lastModificationTime
*        The last time that the dataset was modified. The time depends on the status of the job, as follows:
*        <code>CREATE_PENDING</code> - the same time as <code>CreationTime</code>;
*        <code>CREATE_IN_PROGRESS</code> - the current timestamp; <code>ACTIVE</code> or
*        <code>CREATE_FAILED</code> - when the job finished or failed.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeDatasetImportJobResult withLastModificationTime(java.util.Date lastModificationTime) {
setLastModificationTime(lastModificationTime);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDatasetImportJobName() != null)
sb.append("DatasetImportJobName: ").append(getDatasetImportJobName()).append(",");
if (getDatasetImportJobArn() != null)
sb.append("DatasetImportJobArn: ").append(getDatasetImportJobArn()).append(",");
if (getDatasetArn() != null)
sb.append("DatasetArn: ").append(getDatasetArn()).append(",");
if (getTimestampFormat() != null)
sb.append("TimestampFormat: ").append(getTimestampFormat()).append(",");
if (getDataSource() != null)
sb.append("DataSource: ").append(getDataSource()).append(",");
if (getFieldStatistics() != null)
sb.append("FieldStatistics: ").append(getFieldStatistics()).append(",");
if (getDataSize() != null)
sb.append("DataSize: ").append(getDataSize()).append(",");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getMessage() != null)
sb.append("Message: ").append(getMessage()).append(",");
if (getCreationTime() != null)
sb.append("CreationTime: ").append(getCreationTime()).append(",");
if (getLastModificationTime() != null)
sb.append("LastModificationTime: ").append(getLastModificationTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeDatasetImportJobResult == false)
return false;
DescribeDatasetImportJobResult other = (DescribeDatasetImportJobResult) obj;
if (other.getDatasetImportJobName() == null ^ this.getDatasetImportJobName() == null)
return false;
if (other.getDatasetImportJobName() != null && other.getDatasetImportJobName().equals(this.getDatasetImportJobName()) == false)
return false;
if (other.getDatasetImportJobArn() == null ^ this.getDatasetImportJobArn() == null)
return false;
if (other.getDatasetImportJobArn() != null && other.getDatasetImportJobArn().equals(this.getDatasetImportJobArn()) == false)
return false;
if (other.getDatasetArn() == null ^ this.getDatasetArn() == null)
return false;
if (other.getDatasetArn() != null && other.getDatasetArn().equals(this.getDatasetArn()) == false)
return false;
if (other.getTimestampFormat() == null ^ this.getTimestampFormat() == null)
return false;
if (other.getTimestampFormat() != null && other.getTimestampFormat().equals(this.getTimestampFormat()) == false)
return false;
if (other.getDataSource() == null ^ this.getDataSource() == null)
return false;
if (other.getDataSource() != null && other.getDataSource().equals(this.getDataSource()) == false)
return false;
if (other.getFieldStatistics() == null ^ this.getFieldStatistics() == null)
return false;
if (other.getFieldStatistics() != null && other.getFieldStatistics().equals(this.getFieldStatistics()) == false)
return false;
if (other.getDataSize() == null ^ this.getDataSize() == null)
return false;
if (other.getDataSize() != null && other.getDataSize().equals(this.getDataSize()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getMessage() == null ^ this.getMessage() == null)
return false;
if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false)
return false;
if (other.getCreationTime() == null ^ this.getCreationTime() == null)
return false;
if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false)
return false;
if (other.getLastModificationTime() == null ^ this.getLastModificationTime() == null)
return false;
if (other.getLastModificationTime() != null && other.getLastModificationTime().equals(this.getLastModificationTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDatasetImportJobName() == null) ? 0 : getDatasetImportJobName().hashCode());
hashCode = prime * hashCode + ((getDatasetImportJobArn() == null) ? 0 : getDatasetImportJobArn().hashCode());
hashCode = prime * hashCode + ((getDatasetArn() == null) ? 0 : getDatasetArn().hashCode());
hashCode = prime * hashCode + ((getTimestampFormat() == null) ? 0 : getTimestampFormat().hashCode());
hashCode = prime * hashCode + ((getDataSource() == null) ? 0 : getDataSource().hashCode());
hashCode = prime * hashCode + ((getFieldStatistics() == null) ? 0 : getFieldStatistics().hashCode());
hashCode = prime * hashCode + ((getDataSize() == null) ? 0 : getDataSize().hashCode());
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode());
hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode());
hashCode = prime * hashCode + ((getLastModificationTime() == null) ? 0 : getLastModificationTime().hashCode());
return hashCode;
}
@Override
public DescribeDatasetImportJobResult clone() {
try {
return (DescribeDatasetImportJobResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}