/*
* Copyright 2019-2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sagemaker.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * A channel is a named input source that training algorithms can consume. This channel is used for AutoML jobs V2
 * (jobs created by calling <code>CreateAutoMLJobV2</code>).
 *
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AutoMLJobChannel implements Serializable, Cloneable, StructuredPojo {
/**
 * The type of channel. Defines whether the data are used for training or validation. The default value is
 * <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 * <code>ContentType</code>.
 * <p>
 * The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 */
private String channelType;
/**
 * The content type of the data from the input source. The following are the allowed content types for different
 * problems:
 * <ul>
 * <li>For tabular problem types: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>. The
 * default value is <code>image/*</code>.</li>
 * <li>For text classification: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For time-series forecasting: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * </ul>
 */
private String contentType;
/**
 * The allowed compression types depend on the input format and problem type. We allow the compression type
 * <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the compression
 * type should be <code>None</code>. If no compression type is provided, we default to <code>None</code>.
 */
private String compressionType;
/**
*
* The data source for an AutoML channel (Required).
*
*/
private AutoMLDataSource dataSource;
/**
 * The type of channel. Defines whether the data are used for training or validation. The default value is
 * <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 * <code>ContentType</code>.
 * <p>
 * The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 *
 * @param channelType
 *        The type of channel. Defines whether the data are used for training or validation. The default value is
 *        <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 *        <code>ContentType</code>.
 *        <p>
 *        The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 * @see AutoMLChannelType
 */
public void setChannelType(String channelType) {
this.channelType = channelType;
}
/**
 * The type of channel. Defines whether the data are used for training or validation. The default value is
 * <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 * <code>ContentType</code>.
 * <p>
 * The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 *
 * @return The type of channel. Defines whether the data are used for training or validation. The default value is
 *         <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 *         <code>ContentType</code>.
 *         <p>
 *         The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 * @see AutoMLChannelType
 */
public String getChannelType() {
return this.channelType;
}
/**
 * The type of channel. Defines whether the data are used for training or validation. The default value is
 * <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 * <code>ContentType</code>.
 * <p>
 * The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 *
 * @param channelType
 *        The type of channel. Defines whether the data are used for training or validation. The default value is
 *        <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 *        <code>ContentType</code>.
 *        <p>
 *        The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see AutoMLChannelType
 */
public AutoMLJobChannel withChannelType(String channelType) {
setChannelType(channelType);
return this;
}
/**
 * The type of channel. Defines whether the data are used for training or validation. The default value is
 * <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 * <code>ContentType</code>.
 * <p>
 * The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 *
 * @param channelType
 *        The type of channel. Defines whether the data are used for training or validation. The default value is
 *        <code>training</code>. Channels for <code>training</code> and <code>validation</code> must share the same
 *        <code>ContentType</code>.
 *        <p>
 *        The type of channel defaults to <code>training</code> for the time-series forecasting problem type.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see AutoMLChannelType
 */
public AutoMLJobChannel withChannelType(AutoMLChannelType channelType) {
this.channelType = channelType.toString();
return this;
}
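/*
 * A minimal usage sketch for the channel-type accessors above (not part of the generated model). It assumes the
 * AutoMLChannelType enum in this package exposes Training and Validation constants mapping to the service values
 * "training" and "validation"; the raw String overload can always be used instead.
 *
 * AutoMLJobChannel channel = new AutoMLJobChannel()
 *         .withChannelType(AutoMLChannelType.Training);   // enum overload, stored as "training"
 * channel.setChannelType("validation");                   // equivalent String form
 */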
/**
 * The content type of the data from the input source. The following are the allowed content types for different
 * problems:
 * <ul>
 * <li>For tabular problem types: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>. The
 * default value is <code>image/*</code>.</li>
 * <li>For text classification: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For time-series forecasting: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * </ul>
 *
 * @param contentType
 *        The content type of the data from the input source. The following are the allowed content types for
 *        different problems:
 *        <ul>
 *        <li>For tabular problem types: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>.
 *        The default value is <code>image/*</code>.</li>
 *        <li>For text classification: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For time-series forecasting: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        </ul>
 */
public void setContentType(String contentType) {
this.contentType = contentType;
}
/**
 * The content type of the data from the input source. The following are the allowed content types for different
 * problems:
 * <ul>
 * <li>For tabular problem types: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>. The
 * default value is <code>image/*</code>.</li>
 * <li>For text classification: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For time-series forecasting: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * </ul>
 *
 * @return The content type of the data from the input source. The following are the allowed content types for
 *         different problems:
 *         <ul>
 *         <li>For tabular problem types: <code>text/csv;header=present</code> or
 *         <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *         <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>.
 *         The default value is <code>image/*</code>.</li>
 *         <li>For text classification: <code>text/csv;header=present</code> or
 *         <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *         <li>For time-series forecasting: <code>text/csv;header=present</code> or
 *         <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *         <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 *         <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *         </ul>
 */
public String getContentType() {
return this.contentType;
}
/**
 * The content type of the data from the input source. The following are the allowed content types for different
 * problems:
 * <ul>
 * <li>For tabular problem types: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>. The
 * default value is <code>image/*</code>.</li>
 * <li>For text classification: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For time-series forecasting: <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 * <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 * </ul>
 *
 * @param contentType
 *        The content type of the data from the input source. The following are the allowed content types for
 *        different problems:
 *        <ul>
 *        <li>For tabular problem types: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For image classification: <code>image/png</code>, <code>image/jpeg</code>, or <code>image/*</code>.
 *        The default value is <code>image/*</code>.</li>
 *        <li>For text classification: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For time-series forecasting: <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        <li>For text generation (LLMs fine-tuning): <code>text/csv;header=present</code> or
 *        <code>x-application/vnd.amazon+parquet</code>. The default value is <code>text/csv;header=present</code>.</li>
 *        </ul>
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public AutoMLJobChannel withContentType(String contentType) {
setContentType(contentType);
return this;
}
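/*
 * A short sketch (not part of the generated model) of how a content type is typically chosen per problem type, as
 * described in the Javadoc above. The MIME strings come from that documentation; the channel itself simply stores
 * whatever String is passed in.
 *
 * AutoMLJobChannel tabularParquet = new AutoMLJobChannel()
 *         .withChannelType("training")
 *         .withContentType("x-application/vnd.amazon+parquet");  // tabular data supplied as Parquet
 *
 * AutoMLJobChannel imageChannel = new AutoMLJobChannel()
 *         .withChannelType("training")
 *         .withContentType("image/*");                           // default for image classification
 */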
/**
 * The allowed compression types depend on the input format and problem type. We allow the compression type
 * <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the compression
 * type should be <code>None</code>. If no compression type is provided, we default to <code>None</code>.
 *
 * @param compressionType
 *        The allowed compression types depend on the input format and problem type. We allow the compression type
 *        <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the
 *        compression type should be <code>None</code>. If no compression type is provided, we default to
 *        <code>None</code>.
 * @see CompressionType
 */
public void setCompressionType(String compressionType) {
this.compressionType = compressionType;
}
/**
 * The allowed compression types depend on the input format and problem type. We allow the compression type
 * <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the compression
 * type should be <code>None</code>. If no compression type is provided, we default to <code>None</code>.
 *
 * @return The allowed compression types depend on the input format and problem type. We allow the compression
 *         type <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the
 *         compression type should be <code>None</code>. If no compression type is provided, we default to
 *         <code>None</code>.
 * @see CompressionType
 */
public String getCompressionType() {
return this.compressionType;
}
/**
 * The allowed compression types depend on the input format and problem type. We allow the compression type
 * <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the compression
 * type should be <code>None</code>. If no compression type is provided, we default to <code>None</code>.
 *
 * @param compressionType
 *        The allowed compression types depend on the input format and problem type. We allow the compression type
 *        <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the
 *        compression type should be <code>None</code>. If no compression type is provided, we default to
 *        <code>None</code>.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see CompressionType
 */
public AutoMLJobChannel withCompressionType(String compressionType) {
setCompressionType(compressionType);
return this;
}
/**
 * The allowed compression types depend on the input format and problem type. We allow the compression type
 * <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the compression
 * type should be <code>None</code>. If no compression type is provided, we default to <code>None</code>.
 *
 * @param compressionType
 *        The allowed compression types depend on the input format and problem type. We allow the compression type
 *        <code>Gzip</code> for <code>S3Prefix</code> inputs on tabular data only. For all other inputs, the
 *        compression type should be <code>None</code>. If no compression type is provided, we default to
 *        <code>None</code>.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see CompressionType
 */
public AutoMLJobChannel withCompressionType(CompressionType compressionType) {
this.compressionType = compressionType.toString();
return this;
}
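/*
 * A hedged sketch (not part of the generated model) of the compression rule described above: Gzip is only valid
 * for S3Prefix inputs on tabular data; everything else should use None, which is also the default when the field
 * is left unset. The CompressionType enum constants Gzip and None are assumed to map to the service values of the
 * same name.
 *
 * AutoMLJobChannel gzippedTabular = new AutoMLJobChannel()
 *         .withContentType("text/csv;header=present")
 *         .withCompressionType(CompressionType.Gzip);   // only for S3Prefix tabular inputs
 *
 * AutoMLJobChannel imageInput = new AutoMLJobChannel()
 *         .withContentType("image/*")
 *         .withCompressionType(CompressionType.None);   // or simply omit the field
 */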
/**
*
* The data source for an AutoML channel (Required).
*
*
* @param dataSource
* The data source for an AutoML channel (Required).
*/
public void setDataSource(AutoMLDataSource dataSource) {
this.dataSource = dataSource;
}
/**
*
* The data source for an AutoML channel (Required).
*
*
* @return The data source for an AutoML channel (Required).
*/
public AutoMLDataSource getDataSource() {
return this.dataSource;
}
/**
*
* The data source for an AutoML channel (Required).
*
*
* @param dataSource
* The data source for an AutoML channel (Required).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AutoMLJobChannel withDataSource(AutoMLDataSource dataSource) {
setDataSource(dataSource);
return this;
}
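/*
 * End-to-end sketch (not part of the generated model): building a complete training channel for a
 * CreateAutoMLJobV2 request. It assumes the companion AutoMLDataSource and AutoMLS3DataSource model classes from
 * this package with withS3DataSource/withS3DataType/withS3Uri fluent setters; the bucket name and prefix are
 * placeholders.
 *
 * AutoMLJobChannel trainingChannel = new AutoMLJobChannel()
 *         .withChannelType(AutoMLChannelType.Training)
 *         .withContentType("text/csv;header=present")
 *         .withCompressionType(CompressionType.None)
 *         .withDataSource(new AutoMLDataSource()
 *                 .withS3DataSource(new AutoMLS3DataSource()
 *                         .withS3DataType("S3Prefix")
 *                         .withS3Uri("s3://example-bucket/automl/train/")));
 */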
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getChannelType() != null)
sb.append("ChannelType: ").append(getChannelType()).append(",");
if (getContentType() != null)
sb.append("ContentType: ").append(getContentType()).append(",");
if (getCompressionType() != null)
sb.append("CompressionType: ").append(getCompressionType()).append(",");
if (getDataSource() != null)
sb.append("DataSource: ").append(getDataSource());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof AutoMLJobChannel == false)
return false;
AutoMLJobChannel other = (AutoMLJobChannel) obj;
if (other.getChannelType() == null ^ this.getChannelType() == null)
return false;
if (other.getChannelType() != null && other.getChannelType().equals(this.getChannelType()) == false)
return false;
if (other.getContentType() == null ^ this.getContentType() == null)
return false;
if (other.getContentType() != null && other.getContentType().equals(this.getContentType()) == false)
return false;
if (other.getCompressionType() == null ^ this.getCompressionType() == null)
return false;
if (other.getCompressionType() != null && other.getCompressionType().equals(this.getCompressionType()) == false)
return false;
if (other.getDataSource() == null ^ this.getDataSource() == null)
return false;
if (other.getDataSource() != null && other.getDataSource().equals(this.getDataSource()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getChannelType() == null) ? 0 : getChannelType().hashCode());
hashCode = prime * hashCode + ((getContentType() == null) ? 0 : getContentType().hashCode());
hashCode = prime * hashCode + ((getCompressionType() == null) ? 0 : getCompressionType().hashCode());
hashCode = prime * hashCode + ((getDataSource() == null) ? 0 : getDataSource().hashCode());
return hashCode;
}
@Override
public AutoMLJobChannel clone() {
try {
return (AutoMLJobChannel) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.sagemaker.model.transform.AutoMLJobChannelMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}