/*
* Copyright 2019-2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lookoutequipment.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
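/*
 * Illustrative usage (not part of the generated model): a minimal sketch of building this request and
 * submitting it through the Amazon Lookout for Equipment client. The model name, scheduler name, role ARN,
 * and frequency value are hypothetical placeholders, and the nested input/output configurations are assumed
 * to be populated separately with their own S3 settings; the client and enum types referenced below live in
 * com.amazonaws.services.lookoutequipment and this model package.
 *
 *   AmazonLookoutEquipment client = AmazonLookoutEquipmentClientBuilder.defaultClient();
 *
 *   CreateInferenceSchedulerRequest request = new CreateInferenceSchedulerRequest()
 *           .withModelName("my-trained-model")                                // hypothetical model name
 *           .withInferenceSchedulerName("my-scheduler")                       // hypothetical scheduler name
 *           .withDataUploadFrequency(DataUploadFrequency.PT5M)                // assumes PT5M is a supported value
 *           .withDataDelayOffsetInMinutes(5L)
 *           .withDataInputConfiguration(new InferenceInputConfiguration())    // S3 input config set elsewhere
 *           .withDataOutputConfiguration(new InferenceOutputConfiguration())  // S3 output config set elsewhere
 *           .withRoleArn("arn:aws:iam::123456789012:role/LookoutEquipmentAccess"); // hypothetical role ARN
 *
 *   client.createInferenceScheduler(request);
 */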
/**
*
* @see AWS API Documentation
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateInferenceSchedulerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
*
* The name of the previously trained machine learning model being used to create the inference scheduler.
*
*/
private String modelName;
/**
*
* The name of the inference scheduler being created.
*
*/
private String inferenceSchedulerName;
/**
*
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if inference is
* set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The inference scheduler
* will wake up at the configured interval (which, without a delay configured, would be 09:10) plus the additional
* five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay provides a buffer for you to upload
* data at the same frequency, so that you don't have to stop and restart the scheduler when uploading new data.
*
*
* For more information, see Understanding the inference process.
*
*/
private Long dataDelayOffsetInMinutes;
/**
*
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the length of
* time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the
* real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout
* for Equipment runs inference on your data.
*
*
* For more information, see Understanding the inference process.
*
*/
private String dataUploadFrequency;
/**
*
* Specifies configuration information for the input data for the inference scheduler, including delimiter, format,
* and dataset location.
*
*/
private InferenceInputConfiguration dataInputConfiguration;
/**
*
* Specifies configuration information for the output results for the inference scheduler, including the S3 location
* for the output.
*
*/
private InferenceOutputConfiguration dataOutputConfiguration;
/**
*
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the inference.
*
*/
private String roleArn;
/**
*
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment.
*
*/
private String serverSideKmsKeyId;
/**
*
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for Equipment
* generates one.
*
*/
private String clientToken;
/**
*
* Any tags associated with the inference scheduler.
*
*/
private java.util.List<Tag> tags;
/**
*
* The name of the previously trained machine learning model being used to create the inference scheduler.
*
*
* @param modelName
* The name of the previously trained machine learning model being used to create the inference scheduler.
*/
public void setModelName(String modelName) {
this.modelName = modelName;
}
/**
*
* The name of the previously trained machine learning model being used to create the inference scheduler.
*
*
* @return The name of the previously trained machine learning model being used to create the inference scheduler.
*/
public String getModelName() {
return this.modelName;
}
/**
*
* The name of the previously trained machine learning model being used to create the inference scheduler.
*
*
* @param modelName
* The name of the previously trained machine learning model being used to create the inference scheduler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withModelName(String modelName) {
setModelName(modelName);
return this;
}
/**
*
* The name of the inference scheduler being created.
*
*
* @param inferenceSchedulerName
* The name of the inference scheduler being created.
*/
public void setInferenceSchedulerName(String inferenceSchedulerName) {
this.inferenceSchedulerName = inferenceSchedulerName;
}
/**
*
* The name of the inference scheduler being created.
*
*
* @return The name of the inference scheduler being created.
*/
public String getInferenceSchedulerName() {
return this.inferenceSchedulerName;
}
/**
*
* The name of the inference scheduler being created.
*
*
* @param inferenceSchedulerName
* The name of the inference scheduler being created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withInferenceSchedulerName(String inferenceSchedulerName) {
setInferenceSchedulerName(inferenceSchedulerName);
return this;
}
/**
*
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if inference is
* set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The inference scheduler
* will wake up at the configured interval (which, without a delay configured, would be 09:10) plus the additional
* five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay provides a buffer for you to upload
* data at the same frequency, so that you don't have to stop and restart the scheduler when uploading new data.
*
*
* For more information, see Understanding the inference process.
*
*
* @param dataDelayOffsetInMinutes
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if
* inference is set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The
* inference scheduler will wake up at the configured interval (which, without a delay configured, would be
* 09:10) plus the additional five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay
* provides a buffer for you to upload data at the same frequency, so that you don't have to stop and restart
* the scheduler when uploading new data.
*
* For more information, see Understanding the inference process.
*/
public void setDataDelayOffsetInMinutes(Long dataDelayOffsetInMinutes) {
this.dataDelayOffsetInMinutes = dataDelayOffsetInMinutes;
}
/**
*
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if inference is
* set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The inference scheduler
* will wake up at the configured interval (which, without a delay configured, would be 09:10) plus the additional
* five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay provides a buffer for you to upload
* data at the same frequency, so that you don't have to stop and restart the scheduler when uploading new data.
*
*
* For more information, see Understanding the inference process.
*
*
* @return The interval (in minutes) of planned delay at the start of each inference segment. For example, if
* inference is set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The
* inference scheduler will wake up at the configured interval (which, without a delay configured, would be
* 09:10) plus the additional five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay
* provides a buffer for you to upload data at the same frequency, so that you don't have to stop and
* restart the scheduler when uploading new data.
*
* For more information, see Understanding the inference process.
*/
public Long getDataDelayOffsetInMinutes() {
return this.dataDelayOffsetInMinutes;
}
/**
*
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if inference is
* set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The inference scheduler
* will wake up at the configured interval (which, without a delay configured, would be 09:10) plus the additional
* five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay provides a buffer for you to upload
* data at the same frequency, so that you don't have to stop and restart the scheduler when uploading new data.
*
*
* For more information, see Understanding the inference process.
*
*
* @param dataDelayOffsetInMinutes
* The interval (in minutes) of planned delay at the start of each inference segment. For example, if
* inference is set to run every ten minutes, the delay is set to five minutes and the time is 09:08. The
* inference scheduler will wake up at the configured interval (which, without a delay configured, would be
* 09:10) plus the additional five minute delay time (so 09:15) to check your Amazon S3 bucket. The delay
* provides a buffer for you to upload data at the same frequency, so that you don't have to stop and restart
* the scheduler when uploading new data.
*
* For more information, see Understanding the inference process.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withDataDelayOffsetInMinutes(Long dataDelayOffsetInMinutes) {
setDataDelayOffsetInMinutes(dataDelayOffsetInMinutes);
return this;
}
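/*
 * Illustrative timing sketch restating the arithmetic from the documentation above: with a ten-minute
 * upload frequency and a 5-minute delay offset, a scheduler that would otherwise wake at 09:10 instead
 * checks the S3 bucket at 09:15, giving the upstream upload job a five-minute buffer. The request variable
 * is a hypothetical instance of this class.
 *
 *   request.withDataDelayOffsetInMinutes(5L); // wake-up time = scheduled interval + 5 minutes
 */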
/**
*
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the length of
* time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the
* real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout
* for Equipment runs inference on your data.
*
*
* For more information, see Understanding the inference process.
*
*
* @param dataUploadFrequency
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the
* length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment
* will upload the real-time data to the source bucket once every 5 minutes. This frequency also determines
* how often Amazon Lookout for Equipment runs inference on your data.
*
* For more information, see Understanding the inference process.
* @see DataUploadFrequency
*/
public void setDataUploadFrequency(String dataUploadFrequency) {
this.dataUploadFrequency = dataUploadFrequency;
}
/**
*
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the length of
* time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the
* real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout
* for Equipment runs inference on your data.
*
*
* For more information, see Understanding the inference process.
*
*
* @return How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the
* length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment
* will upload the real-time data to the source bucket once every 5 minutes. This frequency also determines
* how often Amazon Lookout for Equipment runs inference on your data.
*
* For more information, see Understanding the inference process.
* @see DataUploadFrequency
*/
public String getDataUploadFrequency() {
return this.dataUploadFrequency;
}
/**
*
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the length of
* time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the
* real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout
* for Equipment runs inference on your data.
*
*
* For more information, see Understanding the inference process.
*
*
* @param dataUploadFrequency
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the
* length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment
* will upload the real-time data to the source bucket once every 5 minutes. This frequency also determines
* how often Amazon Lookout for Equipment runs inference on your data.
*
* For more information, see Understanding the inference process.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DataUploadFrequency
*/
public CreateInferenceSchedulerRequest withDataUploadFrequency(String dataUploadFrequency) {
setDataUploadFrequency(dataUploadFrequency);
return this;
}
/**
*
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the length of
* time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the
* real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout
* for Equipment runs inference on your data.
*
*
* For more information, see Understanding the inference process.
*
*
* @param dataUploadFrequency
* How often data is uploaded to the source Amazon S3 bucket for the input data. The value chosen is the
* length of time between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment
* will upload the real-time data to the source bucket once every 5 minutes. This frequency also determines
* how often Amazon Lookout for Equipment runs inference on your data.
*
* For more information, see Understanding the inference process.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DataUploadFrequency
*/
public CreateInferenceSchedulerRequest withDataUploadFrequency(DataUploadFrequency dataUploadFrequency) {
this.dataUploadFrequency = dataUploadFrequency.toString();
return this;
}
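/*
 * Illustrative sketch (assumption-based): the enum overload avoids typos in the frequency string, while the
 * String overload accepts the same value verbatim. PT5M is assumed to be one of the supported
 * DataUploadFrequency constants (ISO 8601 duration names).
 *
 *   request.withDataUploadFrequency(DataUploadFrequency.PT5M); // type-safe enum form
 *   request.withDataUploadFrequency("PT5M");                   // equivalent String form
 */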
/**
*
* Specifies configuration information for the input data for the inference scheduler, including delimiter, format,
* and dataset location.
*
*
* @param dataInputConfiguration
* Specifies configuration information for the input data for the inference scheduler, including delimiter,
* format, and dataset location.
*/
public void setDataInputConfiguration(InferenceInputConfiguration dataInputConfiguration) {
this.dataInputConfiguration = dataInputConfiguration;
}
/**
*
* Specifies configuration information for the input data for the inference scheduler, including delimiter, format,
* and dataset location.
*
*
* @return Specifies configuration information for the input data for the inference scheduler, including delimiter,
* format, and dataset location.
*/
public InferenceInputConfiguration getDataInputConfiguration() {
return this.dataInputConfiguration;
}
/**
*
* Specifies configuration information for the input data for the inference scheduler, including delimiter, format,
* and dataset location.
*
*
* @param dataInputConfiguration
* Specifies configuration information for the input data for the inference scheduler, including delimiter,
* format, and dataset location.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withDataInputConfiguration(InferenceInputConfiguration dataInputConfiguration) {
setDataInputConfiguration(dataInputConfiguration);
return this;
}
/**
*
* Specifies configuration information for the output results for the inference scheduler, including the S3 location
* for the output.
*
*
* @param dataOutputConfiguration
* Specifies configuration information for the output results for the inference scheduler, including the S3
* location for the output.
*/
public void setDataOutputConfiguration(InferenceOutputConfiguration dataOutputConfiguration) {
this.dataOutputConfiguration = dataOutputConfiguration;
}
/**
*
* Specifies configuration information for the output results for the inference scheduler, including the S3 location
* for the output.
*
*
* @return Specifies configuration information for the output results for the inference scheduler, including the S3
* location for the output.
*/
public InferenceOutputConfiguration getDataOutputConfiguration() {
return this.dataOutputConfiguration;
}
/**
*
* Specifies configuration information for the output results for the inference scheduler, including the S3 location
* for the output.
*
*
* @param dataOutputConfiguration
* Specifies configuration information for the output results for the inference scheduler, including the S3
* location for the output.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withDataOutputConfiguration(InferenceOutputConfiguration dataOutputConfiguration) {
setDataOutputConfiguration(dataOutputConfiguration);
return this;
}
/**
*
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the inference.
*
*
* @param roleArn
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the
* inference.
*/
public void setRoleArn(String roleArn) {
this.roleArn = roleArn;
}
/**
*
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the inference.
*
*
* @return The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the
* inference.
*/
public String getRoleArn() {
return this.roleArn;
}
/**
*
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the inference.
*
*
* @param roleArn
* The Amazon Resource Name (ARN) of a role with permission to access the data source being used for the
* inference.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withRoleArn(String roleArn) {
setRoleArn(roleArn);
return this;
}
/**
*
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment.
*
*
* @param serverSideKmsKeyId
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for
* Equipment.
*/
public void setServerSideKmsKeyId(String serverSideKmsKeyId) {
this.serverSideKmsKeyId = serverSideKmsKeyId;
}
/**
*
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment.
*
*
* @return Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for
* Equipment.
*/
public String getServerSideKmsKeyId() {
return this.serverSideKmsKeyId;
}
/**
*
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for Equipment.
*
*
* @param serverSideKmsKeyId
* Provides the identifier of the KMS key used to encrypt inference scheduler data by Amazon Lookout for
* Equipment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withServerSideKmsKeyId(String serverSideKmsKeyId) {
setServerSideKmsKeyId(serverSideKmsKeyId);
return this;
}
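/*
 * Illustrative sketch: the value passed here identifies the KMS key used for server-side encryption of the
 * scheduler's data. The key ARN below is a hypothetical placeholder; check the service documentation for the
 * identifier formats it accepts.
 *
 *   request.withServerSideKmsKeyId("arn:aws:kms:us-east-1:123456789012:key/EXAMPLE-KEY-ID"); // hypothetical
 */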
/**
*
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for Equipment
* generates one.
*
*
* @param clientToken
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for
* Equipment generates one.
*/
public void setClientToken(String clientToken) {
this.clientToken = clientToken;
}
/**
*
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for Equipment
* generates one.
*
*
* @return A unique identifier for the request. If you do not set the client request token, Amazon Lookout for
* Equipment generates one.
*/
public String getClientToken() {
return this.clientToken;
}
/**
*
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for Equipment
* generates one.
*
*
* @param clientToken
* A unique identifier for the request. If you do not set the client request token, Amazon Lookout for
* Equipment generates one.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withClientToken(String clientToken) {
setClientToken(clientToken);
return this;
}
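/*
 * Illustrative sketch: supplying your own client token is a common way to make retries idempotent; a random
 * UUID works well, and reusing the same token on a retry is assumed to prevent the service from creating a
 * duplicate scheduler. If you omit it, the service generates one for you, as noted above.
 *
 *   request.withClientToken(java.util.UUID.randomUUID().toString());
 */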
/**
*
* Any tags associated with the inference scheduler.
*
*
* @return Any tags associated with the inference scheduler.
*/
public java.util.List<Tag> getTags() {
return tags;
}
/**
*
* Any tags associated with the inference scheduler.
*
*
* @param tags
* Any tags associated with the inference scheduler.
*/
public void setTags(java.util.Collection<Tag> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new java.util.ArrayList<Tag>(tags);
}
/**
*
* Any tags associated with the inference scheduler.
*
*
* NOTE: This method appends the values to the existing list (if any). Use
* {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
*
*
* @param tags
* Any tags associated with the inference scheduler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withTags(Tag... tags) {
if (this.tags == null) {
setTags(new java.util.ArrayList<Tag>(tags.length));
}
for (Tag ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
*
* Any tags associated with the inference scheduler.
*
*
* @param tags
* Any tags associated with the inference scheduler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateInferenceSchedulerRequest withTags(java.util.Collection<Tag> tags) {
setTags(tags);
return this;
}
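/*
 * Illustrative sketch of the two tag-setting styles documented above: the varargs form appends to any
 * existing list, while the Collection form (and setTags) replaces it. The keys and values are hypothetical,
 * and Tag is assumed to expose the usual withKey/withValue fluent setters of this model package.
 *
 *   request.withTags(new Tag().withKey("team").withValue("reliability"));     // appends
 *   request.setTags(java.util.Arrays.asList(
 *           new Tag().withKey("env").withValue("prod")));                     // replaces
 */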
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getModelName() != null)
sb.append("ModelName: ").append(getModelName()).append(",");
if (getInferenceSchedulerName() != null)
sb.append("InferenceSchedulerName: ").append(getInferenceSchedulerName()).append(",");
if (getDataDelayOffsetInMinutes() != null)
sb.append("DataDelayOffsetInMinutes: ").append(getDataDelayOffsetInMinutes()).append(",");
if (getDataUploadFrequency() != null)
sb.append("DataUploadFrequency: ").append(getDataUploadFrequency()).append(",");
if (getDataInputConfiguration() != null)
sb.append("DataInputConfiguration: ").append(getDataInputConfiguration()).append(",");
if (getDataOutputConfiguration() != null)
sb.append("DataOutputConfiguration: ").append(getDataOutputConfiguration()).append(",");
if (getRoleArn() != null)
sb.append("RoleArn: ").append(getRoleArn()).append(",");
if (getServerSideKmsKeyId() != null)
sb.append("ServerSideKmsKeyId: ").append(getServerSideKmsKeyId()).append(",");
if (getClientToken() != null)
sb.append("ClientToken: ").append(getClientToken()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateInferenceSchedulerRequest == false)
return false;
CreateInferenceSchedulerRequest other = (CreateInferenceSchedulerRequest) obj;
if (other.getModelName() == null ^ this.getModelName() == null)
return false;
if (other.getModelName() != null && other.getModelName().equals(this.getModelName()) == false)
return false;
if (other.getInferenceSchedulerName() == null ^ this.getInferenceSchedulerName() == null)
return false;
if (other.getInferenceSchedulerName() != null && other.getInferenceSchedulerName().equals(this.getInferenceSchedulerName()) == false)
return false;
if (other.getDataDelayOffsetInMinutes() == null ^ this.getDataDelayOffsetInMinutes() == null)
return false;
if (other.getDataDelayOffsetInMinutes() != null && other.getDataDelayOffsetInMinutes().equals(this.getDataDelayOffsetInMinutes()) == false)
return false;
if (other.getDataUploadFrequency() == null ^ this.getDataUploadFrequency() == null)
return false;
if (other.getDataUploadFrequency() != null && other.getDataUploadFrequency().equals(this.getDataUploadFrequency()) == false)
return false;
if (other.getDataInputConfiguration() == null ^ this.getDataInputConfiguration() == null)
return false;
if (other.getDataInputConfiguration() != null && other.getDataInputConfiguration().equals(this.getDataInputConfiguration()) == false)
return false;
if (other.getDataOutputConfiguration() == null ^ this.getDataOutputConfiguration() == null)
return false;
if (other.getDataOutputConfiguration() != null && other.getDataOutputConfiguration().equals(this.getDataOutputConfiguration()) == false)
return false;
if (other.getRoleArn() == null ^ this.getRoleArn() == null)
return false;
if (other.getRoleArn() != null && other.getRoleArn().equals(this.getRoleArn()) == false)
return false;
if (other.getServerSideKmsKeyId() == null ^ this.getServerSideKmsKeyId() == null)
return false;
if (other.getServerSideKmsKeyId() != null && other.getServerSideKmsKeyId().equals(this.getServerSideKmsKeyId()) == false)
return false;
if (other.getClientToken() == null ^ this.getClientToken() == null)
return false;
if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getModelName() == null) ? 0 : getModelName().hashCode());
hashCode = prime * hashCode + ((getInferenceSchedulerName() == null) ? 0 : getInferenceSchedulerName().hashCode());
hashCode = prime * hashCode + ((getDataDelayOffsetInMinutes() == null) ? 0 : getDataDelayOffsetInMinutes().hashCode());
hashCode = prime * hashCode + ((getDataUploadFrequency() == null) ? 0 : getDataUploadFrequency().hashCode());
hashCode = prime * hashCode + ((getDataInputConfiguration() == null) ? 0 : getDataInputConfiguration().hashCode());
hashCode = prime * hashCode + ((getDataOutputConfiguration() == null) ? 0 : getDataOutputConfiguration().hashCode());
hashCode = prime * hashCode + ((getRoleArn() == null) ? 0 : getRoleArn().hashCode());
hashCode = prime * hashCode + ((getServerSideKmsKeyId() == null) ? 0 : getServerSideKmsKeyId().hashCode());
hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public CreateInferenceSchedulerRequest clone() {
return (CreateInferenceSchedulerRequest) super.clone();
}
}