
// com.amazonaws.services.applicationdiscovery.model.ContinuousExportDescription (Maven / Gradle / Ivy)
/*
* Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.applicationdiscovery.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
*
* A list of continuous export descriptions.
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ContinuousExportDescription implements Serializable, Cloneable, StructuredPojo {
/**
*
* The unique ID assigned to this export.
*
*/
private String exportId;
/**
*
* Describes the status of the export. Can be one of the following values:
*
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.
*
*
*
*/
private String status;
/**
*
* Contains information about any errors that have occurred. This data type can have the following values:
*
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web
* Services administrator for help. For more information, see Setting Up Amazon Web
* Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce
* the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon
* Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again.
* For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing
* one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again
* later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export.
* For more information, see Upgrading Amazon Web
* Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the Amazon Web Services
* Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to
* use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more
* information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see Granting Database
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives
* the role database creation ability and implicit permissions for any created tables. For more information, see Implicit Lake Formation
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request
* a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use
* Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*
*/
private String statusDetail;
/**
*
* The name of the s3 bucket where the export data parquet files are stored.
*
*/
private String s3Bucket;
/**
*
* The timestamp representing when the continuous export was started.
*
*/
private java.util.Date startTime;
/**
*
* The timestamp that represents when this continuous export was stopped.
*
*/
private java.util.Date stopTime;
/**
*
* The type of data collector used to gather this data (currently only offered for AGENT).
*
*/
private String dataSource;
/**
*
* An object which describes how the data is stored.
*
*
* -
*
* databaseName
- the name of the Glue database used to store the schema.
*
*
*
*/
// Generic type parameters restored (lost to HTML stripping): keys and values
// are plain strings, e.g. "databaseName" -> the Glue database name, per the
// Javadoc above. Using the raw Map type defeats compile-time type checking.
private java.util.Map<String, String> schemaStorageConfig;
/**
*
* The unique ID assigned to this export.
*
*
* @param exportId
* The unique ID assigned to this export.
*/
public void setExportId(String exportId) {
    // Plain field assignment; generated model setters perform no validation.
    this.exportId = exportId;
}
/**
*
* The unique ID assigned to this export.
*
*
* @return The unique ID assigned to this export.
*/
public String getExportId() {
    // Simple accessor for the export ID; String is immutable, so the field
    // reference can be handed out directly.
    final String currentExportId = this.exportId;
    return currentExportId;
}
/**
*
* The unique ID assigned to this export.
*
*
* @param exportId
* The unique ID assigned to this export.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ContinuousExportDescription withExportId(String exportId) {
    // Fluent variant: delegates to the setter (keeping virtual dispatch for
    // any subclass override) and returns this so calls can be chained.
    setExportId(exportId);
    return this;
}
/**
*
* Describes the status of the export. Can be one of the following values:
*
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.
*
*
*
*
* @param status
* Describes the status of the export. Can be one of the following values:
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export
* again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer
* bucket.
*
*
* @see ContinuousExportStatus
*/
public void setStatus(String status) {
    // Stores the raw status string; no validation against the
    // ContinuousExportStatus enum values is performed here.
    this.status = status;
}
/**
*
* Describes the status of the export. Can be one of the following values:
*
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.
*
*
*
*
* @return Describes the status of the export. Can be one of the following values:
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export
* again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer
* bucket.
*
*
* @see ContinuousExportStatus
*/
public String getStatus() {
    // Returns the stored status string as-is (see ContinuousExportStatus for
    // the documented values).
    final String currentStatus = this.status;
    return currentStatus;
}
/**
*
* Describes the status of the export. Can be one of the following values:
*
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.
*
*
*
*
* @param status
* Describes the status of the export. Can be one of the following values:
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export
* again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer
* bucket.
*
*
* @return Returns a reference to this object so that method calls can be chained together.
* @see ContinuousExportStatus
*/
public ContinuousExportDescription withStatus(String status) {
    // Fluent variant: delegates to the setter (keeping virtual dispatch for
    // any subclass override) and returns this so calls can be chained.
    setStatus(status);
    return this;
}
/**
*
* Describes the status of the export. Can be one of the following values:
*
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer bucket.
*
*
*
*
* @param status
* Describes the status of the export. Can be one of the following values:
*
* -
*
* START_IN_PROGRESS - setting up resources to start continuous export.
*
*
* -
*
* START_FAILED - an error occurred setting up continuous export. To recover, call start-continuous-export
* again.
*
*
* -
*
* ACTIVE - data is being exported to the customer bucket.
*
*
* -
*
* ERROR - an error occurred during export. To fix the issue, call stop-continuous-export and
* start-continuous-export.
*
*
* -
*
* STOP_IN_PROGRESS - stopping the export.
*
*
* -
*
* STOP_FAILED - an error occurred stopping the export. To recover, call stop-continuous-export again.
*
*
* -
*
* INACTIVE - the continuous export has been stopped. Data is no longer being exported to the customer
* bucket.
*
*
* @return Returns a reference to this object so that method calls can be chained together.
* @see ContinuousExportStatus
*/
public ContinuousExportDescription withStatus(ContinuousExportStatus status) {
    // Convert the enum to its wire-format string before storing.
    // NOTE: dereferences status, so a null argument fails fast with an NPE.
    final String statusAsString = status.toString();
    this.status = statusAsString;
    return this;
}
/**
*
* Contains information about any errors that have occurred. This data type can have the following values:
*
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web
* Services administrator for help. For more information, see Setting Up Amazon Web
* Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce
* the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon
* Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again.
* For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing
* one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again
* later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export.
* For more information, see Upgrading Amazon Web
* Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the Amazon Web Services
* Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to
* use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more
* information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see Granting Database
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives
* the role database creation ability and implicit permissions for any created tables. For more information, see Implicit Lake Formation
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request
* a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use
* Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*
*
* @param statusDetail
* Contains information about any errors that have occurred. This data type can have the following
* values:
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon
* Web Services administrator for help. For more information, see Setting Up Amazon
* Web Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams.
* Reduce the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the
* Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and
* try again. For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is
* missing one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try
* again later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous
* export. For more information, see Upgrading
* Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the
* Amazon Web Services Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog
* settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new
* databases. For more information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see
*
* Granting Database Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which
* gives the role database creation ability and implicit permissions for any created tables. For more
* information, see
* Implicit Lake Formation Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or
* request a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you
* can use Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*/
public void setStatusDetail(String statusDetail) {
    // Plain field assignment; the long list of possible error codes is
    // described in the Javadoc above.
    this.statusDetail = statusDetail;
}
/**
*
* Contains information about any errors that have occurred. This data type can have the following values:
*
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web
* Services administrator for help. For more information, see Setting Up Amazon Web
* Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce
* the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon
* Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again.
* For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing
* one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again
* later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export.
* For more information, see Upgrading Amazon Web
* Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the Amazon Web Services
* Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to
* use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more
* information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see Granting Database
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives
* the role database creation ability and implicit permissions for any created tables. For more information, see Implicit Lake Formation
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request
* a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use
* Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*
*
* @return Contains information about any errors that have occurred. This data type can have the following
* values:
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon
* Web Services administrator for help. For more information, see Setting Up
* Amazon Web Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams.
* Reduce the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing
* the Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon
* Athena and try again. For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application
* Discovery Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is
* missing one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try
* again later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous
* export. For more information, see Upgrading
* Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the
* Amazon Web Services Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog
* settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new
* databases. For more information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport
* and AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information,
* see
* Granting Database Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which
* gives the role database creation ability and implicit permissions for any created tables. For more
* information, see Implicit Lake
* Formation Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or
* request a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you
* can use Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*/
public String getStatusDetail() {
    // Returns the stored error-detail string unchanged.
    final String currentStatusDetail = this.statusDetail;
    return currentStatusDetail;
}
/**
*
* Contains information about any errors that have occurred. This data type can have the following values:
*
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon Web
* Services administrator for help. For more information, see Setting Up Amazon Web
* Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce
* the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the Amazon
* Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and try again.
* For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is missing
* one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try again
* later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous export.
* For more information, see Upgrading Amazon Web
* Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the Amazon Web Services
* Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog settings to
* use only Amazon Web Services Identity and Access Management (IAM) access control for new databases. For more
* information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see Granting Database
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which gives
* the role database creation ability and implicit permissions for any created tables. For more information, see Implicit Lake Formation
* Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or request
* a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you can use
* Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
*
*
* @param statusDetail
* Contains information about any errors that have occurred. This data type can have the following
* values:
*
* -
*
* ACCESS_DENIED - You don’t have permission to start Data Exploration in Amazon Athena. Contact your Amazon
* Web Services administrator for help. For more information, see Setting Up Amazon
* Web Services Application Discovery Service in the Application Discovery Service User Guide.
*
*
* -
*
* DELIVERY_STREAM_LIMIT_FAILURE - You reached the limit for Amazon Kinesis Data Firehose delivery streams.
* Reduce the number of streams or request a limit increase and try again. For more information, see Kinesis Data Streams
* Limits in the Amazon Kinesis Data Streams Developer Guide.
*
*
* -
*
* FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an error state because your user is missing the
* Amazon Web ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in Amazon Athena and
* try again. For more information, see Creating the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the Application Discovery
* Service User Guide.
*
*
* -
*
* FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error state because your user is
* missing one or more of the Kinesis data delivery streams.
*
*
* -
*
* INTERNAL_FAILURE - The Data Exploration feature is in an error state because of an internal failure. Try
* again later. If this problem persists, contact Amazon Web Services Support.
*
*
* -
*
* LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation permissions to start continuous
* export. For more information, see Upgrading
* Amazon Web Services Glue Data Permissions to the Amazon Web Services Lake Formation Model in the
* Amazon Web Services Lake Formation Developer Guide.
*
*
* You can use one of the following two ways to resolve this issue.
*
*
* -
*
* If you don’t want to use the Lake Formation permission model, you can change the default Data Catalog
* settings to use only Amazon Web Services Identity and Access Management (IAM) access control for new
* databases. For more information, see Change Data Catalog Settings in the Lake Formation Developer Guide.
*
*
* -
*
* You can give the service-linked IAM roles AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and
* AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. For more information, see
*
* Granting Database Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - Grant database creator permissions, which
* gives the role database creation ability and implicit permissions for any created tables. For more
* information, see
* Implicit Lake Formation Permissions in the Lake Formation Developer Guide.
*
*
* -
*
* AWSApplicationDiscoveryServiceFirehose - Grant describe permissions for all tables in the database.
*
*
*
*
*
*
* -
*
* S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 buckets. Reduce the number of S3 buckets or
* request a limit increase and try again. For more information, see Bucket Restrictions and
* Limitations in the Amazon Simple Storage Service Developer Guide.
*
*
* -
*
* S3_NOT_SIGNED_UP - Your account is not signed up for the Amazon S3 service. You must sign up before you
* can use Amazon S3. You can sign up at the following URL: https://aws.amazon.com/s3.
*
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ContinuousExportDescription withStatusDetail(String statusDetail) {
// Fluent variant of setStatusDetail: stores the detail and returns this
// instance so calls can be chained together.
setStatusDetail(statusDetail);
return this;
}
/**
 * <p>
 * Sets the name of the S3 bucket where the export data parquet files are stored.
 * </p>
 *
 * @param s3Bucket
 *        The name of the s3 bucket where the export data parquet files are stored.
 */
public void setS3Bucket(String s3Bucket) {
    this.s3Bucket = s3Bucket;
}
/**
 * <p>
 * Returns the name of the S3 bucket where the export data parquet files are stored.
 * </p>
 *
 * @return The name of the s3 bucket where the export data parquet files are stored.
 */
public String getS3Bucket() {
    return s3Bucket;
}
/**
 * <p>
 * Fluent setter for the name of the S3 bucket where the export data parquet files are stored.
 * </p>
 *
 * @param s3Bucket
 *        The name of the s3 bucket where the export data parquet files are stored.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ContinuousExportDescription withS3Bucket(String s3Bucket) {
    this.s3Bucket = s3Bucket;
    return this;
}
/**
 * <p>
 * Sets the timestamp representing when the continuous export was started.
 * </p>
 *
 * @param startTime
 *        The timestamp representing when the continuous export was started.
 */
public void setStartTime(java.util.Date startTime) {
    this.startTime = startTime;
}
/**
 * <p>
 * Returns the timestamp representing when the continuous export was started.
 * </p>
 *
 * @return The timestamp representing when the continuous export was started.
 */
public java.util.Date getStartTime() {
    return startTime;
}
/**
 * <p>
 * Fluent setter for the timestamp representing when the continuous export was started.
 * </p>
 *
 * @param startTime
 *        The timestamp representing when the continuous export was started.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ContinuousExportDescription withStartTime(java.util.Date startTime) {
    this.startTime = startTime;
    return this;
}
/**
 * <p>
 * Sets the timestamp that represents when this continuous export was stopped.
 * </p>
 *
 * @param stopTime
 *        The timestamp that represents when this continuous export was stopped.
 */
public void setStopTime(java.util.Date stopTime) {
    this.stopTime = stopTime;
}
/**
 * <p>
 * Returns the timestamp that represents when this continuous export was stopped.
 * </p>
 *
 * @return The timestamp that represents when this continuous export was stopped.
 */
public java.util.Date getStopTime() {
    return stopTime;
}
/**
 * <p>
 * Fluent setter for the timestamp that represents when this continuous export was stopped.
 * </p>
 *
 * @param stopTime
 *        The timestamp that represents when this continuous export was stopped.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ContinuousExportDescription withStopTime(java.util.Date stopTime) {
    this.stopTime = stopTime;
    return this;
}
/**
 * <p>
 * Sets the type of data collector used to gather this data (currently only offered for AGENT).
 * </p>
 *
 * @param dataSource
 *        The type of data collector used to gather this data (currently only offered for AGENT).
 * @see DataSource
 */
public void setDataSource(String dataSource) {
    this.dataSource = dataSource;
}
/**
 * <p>
 * Returns the type of data collector used to gather this data (currently only offered for AGENT).
 * </p>
 *
 * @return The type of data collector used to gather this data (currently only offered for AGENT).
 * @see DataSource
 */
public String getDataSource() {
    return dataSource;
}
/**
 * <p>
 * Fluent setter for the type of data collector used to gather this data (currently only offered for AGENT).
 * </p>
 *
 * @param dataSource
 *        The type of data collector used to gather this data (currently only offered for AGENT).
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see DataSource
 */
public ContinuousExportDescription withDataSource(String dataSource) {
    this.dataSource = dataSource;
    return this;
}
/**
 * <p>
 * Enum overload of the fluent setter for the type of data collector used to gather this data (currently only
 * offered for AGENT). The enum is stored as its string representation.
 * </p>
 *
 * @param dataSource
 *        The type of data collector used to gather this data (currently only offered for AGENT).
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see DataSource
 */
public ContinuousExportDescription withDataSource(DataSource dataSource) {
    // Persist the enum by name; a null argument fails fast with an NPE here,
    // matching the original behavior.
    String asString = dataSource.toString();
    this.dataSource = asString;
    return this;
}
/**
 * <p>
 * An object which describes how the data is stored.
 * </p>
 * <ul>
 * <li>
 * <p>
 * <code>databaseName</code> - the name of the Glue database used to store the schema.
 * </p>
 * </li>
 * </ul>
 *
 * @return An object which describes how the data is stored, e.g. <code>databaseName</code> - the name of the Glue
 *         database used to store the schema. May be {@code null} if never set.
 */
public java.util.Map<String, String> getSchemaStorageConfig() {
    // Generic type parameters restored: the raw java.util.Map here was an
    // artifact of <String, String> being stripped as markup.
    return schemaStorageConfig;
}
/**
 * <p>
 * An object which describes how the data is stored.
 * </p>
 * <ul>
 * <li>
 * <p>
 * <code>databaseName</code> - the name of the Glue database used to store the schema.
 * </p>
 * </li>
 * </ul>
 *
 * @param schemaStorageConfig
 *        An object which describes how the data is stored, e.g. <code>databaseName</code> - the name of the Glue
 *        database used to store the schema.
 */
public void setSchemaStorageConfig(java.util.Map<String, String> schemaStorageConfig) {
    // Generic type parameters restored: the raw java.util.Map here was an
    // artifact of <String, String> being stripped as markup.
    this.schemaStorageConfig = schemaStorageConfig;
}
/**
 * <p>
 * An object which describes how the data is stored.
 * </p>
 * <ul>
 * <li>
 * <p>
 * <code>databaseName</code> - the name of the Glue database used to store the schema.
 * </p>
 * </li>
 * </ul>
 *
 * @param schemaStorageConfig
 *        An object which describes how the data is stored, e.g. <code>databaseName</code> - the name of the Glue
 *        database used to store the schema.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ContinuousExportDescription withSchemaStorageConfig(java.util.Map<String, String> schemaStorageConfig) {
    // Generic type parameters restored: the raw java.util.Map here was an
    // artifact of <String, String> being stripped as markup.
    setSchemaStorageConfig(schemaStorageConfig);
    return this;
}
/**
 * Add a single SchemaStorageConfig entry
 *
 * @param key
 *        The entry key; must not already be present.
 * @param value
 *        The entry value.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException
 *         if an entry with the same key has already been added.
 * @see ContinuousExportDescription#withSchemaStorageConfig
 */
public ContinuousExportDescription addSchemaStorageConfigEntry(String key, String value) {
    // Lazily allocate the backing map; parameterized HashMap restores the
    // <String, String> generics stripped by the HTML extraction.
    if (null == this.schemaStorageConfig) {
        this.schemaStorageConfig = new java.util.HashMap<String, String>();
    }
    if (this.schemaStorageConfig.containsKey(key))
        // Use key directly instead of key.toString(): same message for non-null
        // keys, and no NPE when a null key is reported.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    this.schemaStorageConfig.put(key, value);
    return this;
}
/**
 * Removes all the entries added into SchemaStorageConfig.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ContinuousExportDescription clearSchemaStorageConfigEntries() {
    // Drop the reference entirely (rather than clearing the map) so the
    // lazy allocation in addSchemaStorageConfigEntry starts fresh.
    this.schemaStorageConfig = null;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Only non-null members are rendered; every member except the last is
    // followed by a comma, matching the generated-SDK format exactly.
    StringBuilder buf = new StringBuilder("{");
    if (getExportId() != null) {
        buf.append("ExportId: ").append(getExportId()).append(",");
    }
    if (getStatus() != null) {
        buf.append("Status: ").append(getStatus()).append(",");
    }
    if (getStatusDetail() != null) {
        buf.append("StatusDetail: ").append(getStatusDetail()).append(",");
    }
    if (getS3Bucket() != null) {
        buf.append("S3Bucket: ").append(getS3Bucket()).append(",");
    }
    if (getStartTime() != null) {
        buf.append("StartTime: ").append(getStartTime()).append(",");
    }
    if (getStopTime() != null) {
        buf.append("StopTime: ").append(getStopTime()).append(",");
    }
    if (getDataSource() != null) {
        buf.append("DataSource: ").append(getDataSource()).append(",");
    }
    if (getSchemaStorageConfig() != null) {
        buf.append("SchemaStorageConfig: ").append(getSchemaStorageConfig());
    }
    return buf.append("}").toString();
}
/**
 * Two descriptions are equal when every member matches, with nulls only
 * equal to nulls (identical to the generated null-XOR comparison chain).
 *
 * @see java.lang.Object#equals(Object)
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof ContinuousExportDescription)) {
        // Also covers obj == null: instanceof is false for null.
        return false;
    }
    ContinuousExportDescription that = (ContinuousExportDescription) obj;
    // Objects.equals(a, b) is true iff both are null or a.equals(b) —
    // exactly the original (a == null ^ b == null) / a.equals(b) logic.
    return java.util.Objects.equals(getExportId(), that.getExportId())
            && java.util.Objects.equals(getStatus(), that.getStatus())
            && java.util.Objects.equals(getStatusDetail(), that.getStatusDetail())
            && java.util.Objects.equals(getS3Bucket(), that.getS3Bucket())
            && java.util.Objects.equals(getStartTime(), that.getStartTime())
            && java.util.Objects.equals(getStopTime(), that.getStopTime())
            && java.util.Objects.equals(getDataSource(), that.getDataSource())
            && java.util.Objects.equals(getSchemaStorageConfig(), that.getSchemaStorageConfig());
}
/**
 * Hash code consistent with {@link #equals(Object)}.
 *
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
    // java.util.Objects.hash uses Arrays.hashCode semantics: start at 1,
    // accumulate 31 * h + (e == null ? 0 : e.hashCode()) per element —
    // bit-for-bit identical to the original manual prime-31 chain.
    return java.util.Objects.hash(getExportId(), getStatus(), getStatusDetail(), getS3Bucket(), getStartTime(), getStopTime(), getDataSource(),
            getSchemaStorageConfig());
}
/**
 * Returns a shallow copy of this description via {@link Object#clone()}.
 *
 * @return A shallow copy of this object.
 */
@Override
public ContinuousExportDescription clone() {
    try {
        return (ContinuousExportDescription) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class implements Cloneable. Single
        // literal here compiles to the same runtime message as the original
        // two-part concatenation.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
    }
}
// Marshals this POJO into the given protocol marshaller by delegating to the
// generated ContinuousExportDescriptionMarshaller singleton. SDK-internal API.
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.applicationdiscovery.model.transform.ContinuousExportDescriptionMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}