/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package software.amazon.awssdk.services.redshiftdata;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.annotations.SdkInternalApi;
import software.amazon.awssdk.awscore.client.handler.AwsSyncClientHandler;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import software.amazon.awssdk.awscore.internal.AwsProtocolMetadata;
import software.amazon.awssdk.awscore.internal.AwsServiceProtocol;
import software.amazon.awssdk.awscore.retry.AwsRetryStrategy;
import software.amazon.awssdk.core.RequestOverrideConfiguration;
import software.amazon.awssdk.core.SdkPlugin;
import software.amazon.awssdk.core.SdkRequest;
import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
import software.amazon.awssdk.core.client.config.SdkClientConfiguration;
import software.amazon.awssdk.core.client.config.SdkClientOption;
import software.amazon.awssdk.core.client.handler.ClientExecutionParams;
import software.amazon.awssdk.core.client.handler.SyncClientHandler;
import software.amazon.awssdk.core.exception.SdkClientException;
import software.amazon.awssdk.core.http.HttpResponseHandler;
import software.amazon.awssdk.core.metrics.CoreMetric;
import software.amazon.awssdk.core.retry.RetryMode;
import software.amazon.awssdk.metrics.MetricCollector;
import software.amazon.awssdk.metrics.MetricPublisher;
import software.amazon.awssdk.metrics.NoOpMetricCollector;
import software.amazon.awssdk.protocols.core.ExceptionMetadata;
import software.amazon.awssdk.protocols.json.AwsJsonProtocol;
import software.amazon.awssdk.protocols.json.AwsJsonProtocolFactory;
import software.amazon.awssdk.protocols.json.BaseAwsJsonProtocolFactory;
import software.amazon.awssdk.protocols.json.JsonOperationMetadata;
import software.amazon.awssdk.retries.api.RetryStrategy;
import software.amazon.awssdk.services.redshiftdata.internal.RedshiftDataServiceClientConfigurationBuilder;
import software.amazon.awssdk.services.redshiftdata.model.ActiveStatementsExceededException;
import software.amazon.awssdk.services.redshiftdata.model.BatchExecuteStatementException;
import software.amazon.awssdk.services.redshiftdata.model.BatchExecuteStatementRequest;
import software.amazon.awssdk.services.redshiftdata.model.BatchExecuteStatementResponse;
import software.amazon.awssdk.services.redshiftdata.model.CancelStatementRequest;
import software.amazon.awssdk.services.redshiftdata.model.CancelStatementResponse;
import software.amazon.awssdk.services.redshiftdata.model.DatabaseConnectionException;
import software.amazon.awssdk.services.redshiftdata.model.DescribeStatementRequest;
import software.amazon.awssdk.services.redshiftdata.model.DescribeStatementResponse;
import software.amazon.awssdk.services.redshiftdata.model.DescribeTableRequest;
import software.amazon.awssdk.services.redshiftdata.model.DescribeTableResponse;
import software.amazon.awssdk.services.redshiftdata.model.ExecuteStatementException;
import software.amazon.awssdk.services.redshiftdata.model.ExecuteStatementRequest;
import software.amazon.awssdk.services.redshiftdata.model.ExecuteStatementResponse;
import software.amazon.awssdk.services.redshiftdata.model.GetStatementResultRequest;
import software.amazon.awssdk.services.redshiftdata.model.GetStatementResultResponse;
import software.amazon.awssdk.services.redshiftdata.model.InternalServerException;
import software.amazon.awssdk.services.redshiftdata.model.ListDatabasesRequest;
import software.amazon.awssdk.services.redshiftdata.model.ListDatabasesResponse;
import software.amazon.awssdk.services.redshiftdata.model.ListSchemasRequest;
import software.amazon.awssdk.services.redshiftdata.model.ListSchemasResponse;
import software.amazon.awssdk.services.redshiftdata.model.ListStatementsRequest;
import software.amazon.awssdk.services.redshiftdata.model.ListStatementsResponse;
import software.amazon.awssdk.services.redshiftdata.model.ListTablesRequest;
import software.amazon.awssdk.services.redshiftdata.model.ListTablesResponse;
import software.amazon.awssdk.services.redshiftdata.model.RedshiftDataException;
import software.amazon.awssdk.services.redshiftdata.model.ResourceNotFoundException;
import software.amazon.awssdk.services.redshiftdata.model.ValidationException;
import software.amazon.awssdk.services.redshiftdata.transform.BatchExecuteStatementRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.CancelStatementRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.DescribeStatementRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.DescribeTableRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.ExecuteStatementRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.GetStatementResultRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.ListDatabasesRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.ListSchemasRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.ListStatementsRequestMarshaller;
import software.amazon.awssdk.services.redshiftdata.transform.ListTablesRequestMarshaller;
import software.amazon.awssdk.utils.Logger;
/**
* Internal implementation of {@link RedshiftDataClient}.
*
* @see RedshiftDataClient#builder()
*/
@Generated("software.amazon.awssdk:codegen")
@SdkInternalApi
final class DefaultRedshiftDataClient implements RedshiftDataClient {
private static final Logger log = Logger.loggerFor(DefaultRedshiftDataClient.class);
private static final AwsProtocolMetadata protocolMetadata = AwsProtocolMetadata.builder()
.serviceProtocol(AwsServiceProtocol.AWS_JSON).build();
private final SyncClientHandler clientHandler;
private final AwsJsonProtocolFactory protocolFactory;
private final SdkClientConfiguration clientConfiguration;
protected DefaultRedshiftDataClient(SdkClientConfiguration clientConfiguration) {
this.clientHandler = new AwsSyncClientHandler(clientConfiguration);
this.clientConfiguration = clientConfiguration.toBuilder().option(SdkClientOption.SDK_CLIENT, this).build();
this.protocolFactory = init(AwsJsonProtocolFactory.builder()).build();
}
/**
*
* Runs one or more SQL statements, which can be data manipulation language (DML) or data definition language (DDL).
* Depending on the authorization method, use one of the following combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
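* <p>
* A minimal, illustrative usage sketch (not part of the service documentation). It assumes IAM temporary
* credentials against a serverless workgroup; the workgroup and database names below are placeholders:
* </p>
*
* <pre>{@code
* RedshiftDataClient client = RedshiftDataClient.create();
* BatchExecuteStatementRequest request = BatchExecuteStatementRequest.builder()
*         .workgroupName("my-workgroup") // placeholder serverless workgroup
*         .database("dev")               // placeholder database name
*         .sqls("CREATE TEMP TABLE t (id INT)", "INSERT INTO t VALUES (1)")
*         .build();
* String statementId = client.batchExecuteStatement(request).id();
* }</pre>
*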
* @param batchExecuteStatementRequest
* @return Result of the BatchExecuteStatement operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws ActiveStatementsExceededException
* The number of active statements exceeds the limit.
* @throws BatchExecuteStatementException
* An SQL statement encountered an environmental error while running.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.BatchExecuteStatement
* @see AWS API Documentation
*/
@Override
public BatchExecuteStatementResponse batchExecuteStatement(BatchExecuteStatementRequest batchExecuteStatementRequest)
throws ValidationException, ActiveStatementsExceededException, BatchExecuteStatementException, AwsServiceException,
SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<BatchExecuteStatementResponse> responseHandler = protocolFactory.createResponseHandler(
operationMetadata, BatchExecuteStatementResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(batchExecuteStatementRequest,
this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, batchExecuteStatementRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "BatchExecuteStatement");
return clientHandler.execute(new ClientExecutionParams<BatchExecuteStatementRequest, BatchExecuteStatementResponse>()
.withOperationName("BatchExecuteStatement").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(batchExecuteStatementRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new BatchExecuteStatementRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Cancels a running query. To be canceled, a query must be running.
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
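* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client} and a running statement whose identifier {@code statementId} was
* returned by an earlier ExecuteStatement or BatchExecuteStatement call:
* </p>
*
* <pre>{@code
* CancelStatementResponse cancelled = client.cancelStatement(CancelStatementRequest.builder()
*         .id(statementId) // identifier of the running statement
*         .build());
* boolean wasCancelled = cancelled.status(); // true if the cancel request succeeded
* }</pre>
*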
* @param cancelStatementRequest
* @return Result of the CancelStatement operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws ResourceNotFoundException
* The Amazon Redshift Data API operation failed due to a missing resource.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws DatabaseConnectionException
* Connection to a database failed.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.CancelStatement
* @see AWS
* API Documentation
*/
@Override
public CancelStatementResponse cancelStatement(CancelStatementRequest cancelStatementRequest) throws ValidationException,
ResourceNotFoundException, InternalServerException, DatabaseConnectionException, AwsServiceException,
SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<CancelStatementResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
CancelStatementResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(cancelStatementRequest,
this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, cancelStatementRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "CancelStatement");
return clientHandler.execute(new ClientExecutionParams<CancelStatementRequest, CancelStatementResponse>()
.withOperationName("CancelStatement").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(cancelStatementRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new CancelStatementRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Describes the details about a specific instance when a query was run by the Amazon Redshift Data API. The
* information includes when the query started, when it finished, the query status, the number of rows returned, and
* the SQL statement.
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
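* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client} and a statement identifier {@code statementId} from an earlier
* call:
* </p>
*
* <pre>{@code
* DescribeStatementResponse description = client.describeStatement(DescribeStatementRequest.builder()
*         .id(statementId)
*         .build());
* if (description.status() == StatusString.FINISHED) {
*     System.out.println("Rows returned: " + description.resultRows());
* }
* }</pre>
*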
* @param describeStatementRequest
* @return Result of the DescribeStatement operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws ResourceNotFoundException
* The Amazon Redshift Data API operation failed due to a missing resource.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.DescribeStatement
* @see AWS API Documentation
*/
@Override
public DescribeStatementResponse describeStatement(DescribeStatementRequest describeStatementRequest)
throws ValidationException, ResourceNotFoundException, InternalServerException, AwsServiceException,
SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<DescribeStatementResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
DescribeStatementResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(describeStatementRequest,
this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, describeStatementRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "DescribeStatement");
return clientHandler.execute(new ClientExecutionParams<DescribeStatementRequest, DescribeStatementResponse>()
.withOperationName("DescribeStatement").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(describeStatementRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new DescribeStatementRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Describes the detailed information about a table from metadata in the cluster. The information includes its
* columns. A token is returned to page through the column list. Depending on the authorization method, use one of
* the following combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
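* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client}; the workgroup, database, schema, and table names are
* placeholders:
* </p>
*
* <pre>{@code
* DescribeTableResponse table = client.describeTable(DescribeTableRequest.builder()
*         .workgroupName("my-workgroup") // placeholder serverless workgroup
*         .database("dev")
*         .schema("public")
*         .table("sales")
*         .build());
* table.columnList().forEach(column -> System.out.println(column.name() + " : " + column.typeName()));
* }</pre>
*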
* @param describeTableRequest
* @return Result of the DescribeTable operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws DatabaseConnectionException
* Connection to a database failed.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.DescribeTable
* @see AWS
* API Documentation
*/
@Override
public DescribeTableResponse describeTable(DescribeTableRequest describeTableRequest) throws ValidationException,
InternalServerException, DatabaseConnectionException, AwsServiceException, SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<DescribeTableResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
DescribeTableResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(describeTableRequest, this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, describeTableRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "DescribeTable");
return clientHandler.execute(new ClientExecutionParams<DescribeTableRequest, DescribeTableResponse>()
.withOperationName("DescribeTable").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(describeTableRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new DescribeTableRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Runs an SQL statement, which can be data manipulation language (DML) or data definition language (DDL). This
* statement must be a single SQL statement. Depending on the authorization method, use one of the following
* combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
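* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client} and Secrets Manager authentication; the cluster identifier,
* database name, and secret ARN are placeholders:
* </p>
*
* <pre>{@code
* ExecuteStatementResponse submitted = client.executeStatement(ExecuteStatementRequest.builder()
*         .clusterIdentifier("my-cluster") // placeholder cluster identifier
*         .database("dev")                 // placeholder database name
*         .secretArn("arn:aws:secretsmanager:us-east-1:123456789012:secret:my-secret") // placeholder ARN
*         .sql("SELECT count(*) FROM sales")
*         .build());
* String statementId = submitted.id(); // poll DescribeStatement with this id until the query finishes
* }</pre>
*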
* @param executeStatementRequest
* @return Result of the ExecuteStatement operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws ExecuteStatementException
* The SQL statement encountered an environmental error while running.
* @throws ActiveStatementsExceededException
* The number of active statements exceeds the limit.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.ExecuteStatement
* @see AWS API Documentation
*/
@Override
public ExecuteStatementResponse executeStatement(ExecuteStatementRequest executeStatementRequest) throws ValidationException,
ExecuteStatementException, ActiveStatementsExceededException, AwsServiceException, SdkClientException,
RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<ExecuteStatementResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
ExecuteStatementResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(executeStatementRequest,
this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, executeStatementRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "ExecuteStatement");
return clientHandler.execute(new ClientExecutionParams<ExecuteStatementRequest, ExecuteStatementResponse>()
.withOperationName("ExecuteStatement").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(executeStatementRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new ExecuteStatementRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Fetches the temporarily cached result of an SQL statement. A token is returned to page through the statement
* results.
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
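* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client} and a finished statement identifier {@code statementId}; it pages
* through the result using the returned token:
* </p>
*
* <pre>{@code
* String nextToken = null;
* do {
*     GetStatementResultResponse page = client.getStatementResult(GetStatementResultRequest.builder()
*             .id(statementId)
*             .nextToken(nextToken)
*             .build());
*     page.records().forEach(System.out::println); // each record is a list of Field values
*     nextToken = page.nextToken();
* } while (nextToken != null);
* }</pre>
*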
* @param getStatementResultRequest
* @return Result of the GetStatementResult operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws ResourceNotFoundException
* The Amazon Redshift Data API operation failed due to a missing resource.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.GetStatementResult
* @see AWS API Documentation
*/
@Override
public GetStatementResultResponse getStatementResult(GetStatementResultRequest getStatementResultRequest)
throws ValidationException, ResourceNotFoundException, InternalServerException, AwsServiceException,
SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<GetStatementResultResponse> responseHandler = protocolFactory.createResponseHandler(
operationMetadata, GetStatementResultResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(getStatementResultRequest,
this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, getStatementResultRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "GetStatementResult");
return clientHandler.execute(new ClientExecutionParams<GetStatementResultRequest, GetStatementResultResponse>()
.withOperationName("GetStatementResult").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(getStatementResultRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new GetStatementResultRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* List the databases in a cluster. A token is returned to page through the database list. Depending on the
* authorization method, use one of the following combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
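* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client}; the workgroup and database names are placeholders and only the
* first page is read:
* </p>
*
* <pre>{@code
* ListDatabasesResponse databases = client.listDatabases(ListDatabasesRequest.builder()
*         .workgroupName("my-workgroup") // placeholder serverless workgroup
*         .database("dev")               // database to connect to
*         .build());
* databases.databases().forEach(System.out::println);
* }</pre>
*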
* @param listDatabasesRequest
* @return Result of the ListDatabases operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws DatabaseConnectionException
* Connection to a database failed.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.ListDatabases
* @see AWS
* API Documentation
*/
@Override
public ListDatabasesResponse listDatabases(ListDatabasesRequest listDatabasesRequest) throws ValidationException,
InternalServerException, DatabaseConnectionException, AwsServiceException, SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<ListDatabasesResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
ListDatabasesResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(listDatabasesRequest, this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, listDatabasesRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "ListDatabases");
return clientHandler.execute(new ClientExecutionParams<ListDatabasesRequest, ListDatabasesResponse>()
.withOperationName("ListDatabases").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(listDatabasesRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new ListDatabasesRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* Lists the schemas in a database. A token is returned to page through the schema list. Depending on the
* authorization method, use one of the following combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
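* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client} and temporary credentials as a database user; identifiers are
* placeholders and only the first page is read:
* </p>
*
* <pre>{@code
* ListSchemasResponse schemas = client.listSchemas(ListSchemasRequest.builder()
*         .clusterIdentifier("my-cluster") // placeholder cluster identifier
*         .dbUser("awsuser")               // placeholder database user
*         .database("dev")
*         .schemaPattern("public%")        // optional pattern filter
*         .build());
* schemas.schemas().forEach(System.out::println);
* }</pre>
*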
* @param listSchemasRequest
* @return Result of the ListSchemas operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws DatabaseConnectionException
* Connection to a database failed.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.ListSchemas
* @see AWS API
* Documentation
*/
@Override
public ListSchemasResponse listSchemas(ListSchemasRequest listSchemasRequest) throws ValidationException,
InternalServerException, DatabaseConnectionException, AwsServiceException, SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<ListSchemasResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
ListSchemasResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(listSchemasRequest, this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, listSchemasRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "ListSchemas");
return clientHandler.execute(new ClientExecutionParams<ListSchemasRequest, ListSchemasResponse>()
.withOperationName("ListSchemas").withProtocolMetadata(protocolMetadata).withResponseHandler(responseHandler)
.withErrorResponseHandler(errorResponseHandler).withRequestConfiguration(clientConfiguration)
.withInput(listSchemasRequest).withMetricCollector(apiCallMetricCollector)
.withMarshaller(new ListSchemasRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* List of SQL statements. By default, only finished statements are shown. A token is returned to page through the
* statement list.
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
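* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client}; it reads only the first page of finished statements:
* </p>
*
* <pre>{@code
* ListStatementsResponse statements = client.listStatements(ListStatementsRequest.builder()
*         .status(StatusString.FINISHED)
*         .build());
* statements.statements().forEach(s -> System.out.println(s.id() + " " + s.queryString()));
* }</pre>
*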
* @param listStatementsRequest
* @return Result of the ListStatements operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.ListStatements
* @see AWS
* API Documentation
*/
@Override
public ListStatementsResponse listStatements(ListStatementsRequest listStatementsRequest) throws ValidationException,
InternalServerException, AwsServiceException, SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<ListStatementsResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
ListStatementsResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(listStatementsRequest, this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, listStatementsRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "ListStatements");
return clientHandler.execute(new ClientExecutionParams<ListStatementsRequest, ListStatementsResponse>()
.withOperationName("ListStatements").withProtocolMetadata(protocolMetadata)
.withResponseHandler(responseHandler).withErrorResponseHandler(errorResponseHandler)
.withRequestConfiguration(clientConfiguration).withInput(listStatementsRequest)
.withMetricCollector(apiCallMetricCollector)
.withMarshaller(new ListStatementsRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
/**
*
* List the tables in a database. If neither <code>SchemaPattern</code> nor <code>TablePattern</code> are specified,
* then all tables in the database are returned. A token is returned to page through the table list. Depending on
* the authorization method, use one of the following combinations of request parameters:
*
*
* -
*
* Secrets Manager - when connecting to a cluster, provide the <code>secret-arn</code> of a secret stored in Secrets
* Manager which has <code>username</code> and <code>password</code>. The specified secret contains credentials to
* connect to the <code>database</code> you specify. When you are connecting to a cluster, you also supply the
* database name. If you provide a cluster identifier (<code>dbClusterIdentifier</code>), it must match the cluster
* identifier stored in the secret. When you are connecting to a serverless workgroup, you also supply the database
* name.
*
*
* -
*
* Temporary credentials - when connecting to your data warehouse, choose one of the following options:
*
*
* -
*
* When connecting to a serverless workgroup, specify the workgroup name and database name. The database user name
* is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has the database user
* name <code>IAM:foo</code>. Also, permission to call the <code>redshift-serverless:GetCredentials</code> operation
* is required.
*
*
* -
*
* When connecting to a cluster as an IAM identity, specify the cluster identifier and the database name. The
* database user name is derived from the IAM identity. For example, <code>arn:iam::123456789012:user:foo</code> has
* the database user name <code>IAM:foo</code>. Also, permission to call the
* <code>redshift:GetClusterCredentialsWithIAM</code> operation is required.
*
*
* -
*
* When connecting to a cluster as a database user, specify the cluster identifier, the database name, and the
* database user name. Also, permission to call the <code>redshift:GetClusterCredentials</code> operation is
* required.
*
*
*
*
*
*
* For more information about the Amazon Redshift Data API and CLI usage examples, see Using the Amazon Redshift Data API in
* the Amazon Redshift Management Guide.
*
*
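* <p>
* A minimal, illustrative usage sketch (not part of the service documentation), assuming a configured
* {@code RedshiftDataClient} named {@code client}; identifiers and patterns are placeholders and only the first
* page is read:
* </p>
*
* <pre>{@code
* ListTablesResponse tables = client.listTables(ListTablesRequest.builder()
*         .workgroupName("my-workgroup") // placeholder serverless workgroup
*         .database("dev")
*         .schemaPattern("public")
*         .tablePattern("sales%")        // optional pattern filter
*         .build());
* tables.tables().forEach(t -> System.out.println(t.schema() + "." + t.name()));
* }</pre>
*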
* @param listTablesRequest
* @return Result of the ListTables operation returned by the service.
* @throws ValidationException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws InternalServerException
* The Amazon Redshift Data API operation failed due to invalid input.
* @throws DatabaseConnectionException
* Connection to a database failed.
* @throws SdkException
* Base class for all exceptions that can be thrown by the SDK (both service and client). Can be used for
* catch all scenarios.
* @throws SdkClientException
* If any client side error occurs such as an IO related failure, failure to get credentials, etc.
* @throws RedshiftDataException
* Base class for all service exceptions. Unknown exceptions will be thrown as an instance of this type.
* @sample RedshiftDataClient.ListTables
* @see AWS API
* Documentation
*/
@Override
public ListTablesResponse listTables(ListTablesRequest listTablesRequest) throws ValidationException,
InternalServerException, DatabaseConnectionException, AwsServiceException, SdkClientException, RedshiftDataException {
JsonOperationMetadata operationMetadata = JsonOperationMetadata.builder().hasStreamingSuccessResponse(false)
.isPayloadJson(true).build();
HttpResponseHandler<ListTablesResponse> responseHandler = protocolFactory.createResponseHandler(operationMetadata,
ListTablesResponse::builder);
HttpResponseHandler<AwsServiceException> errorResponseHandler = createErrorResponseHandler(protocolFactory,
operationMetadata);
SdkClientConfiguration clientConfiguration = updateSdkClientConfiguration(listTablesRequest, this.clientConfiguration);
List<MetricPublisher> metricPublishers = resolveMetricPublishers(clientConfiguration, listTablesRequest
.overrideConfiguration().orElse(null));
MetricCollector apiCallMetricCollector = metricPublishers.isEmpty() ? NoOpMetricCollector.create() : MetricCollector
.create("ApiCall");
try {
apiCallMetricCollector.reportMetric(CoreMetric.SERVICE_ID, "Redshift Data");
apiCallMetricCollector.reportMetric(CoreMetric.OPERATION_NAME, "ListTables");
return clientHandler.execute(new ClientExecutionParams<ListTablesRequest, ListTablesResponse>()
.withOperationName("ListTables").withProtocolMetadata(protocolMetadata).withResponseHandler(responseHandler)
.withErrorResponseHandler(errorResponseHandler).withRequestConfiguration(clientConfiguration)
.withInput(listTablesRequest).withMetricCollector(apiCallMetricCollector)
.withMarshaller(new ListTablesRequestMarshaller(protocolFactory)));
} finally {
metricPublishers.forEach(p -> p.publish(apiCallMetricCollector.collect()));
}
}
@Override
public final String serviceName() {
return SERVICE_NAME;
}
private static List<MetricPublisher> resolveMetricPublishers(SdkClientConfiguration clientConfiguration,
RequestOverrideConfiguration requestOverrideConfiguration) {
List<MetricPublisher> publishers = null;
if (requestOverrideConfiguration != null) {
publishers = requestOverrideConfiguration.metricPublishers();
}
if (publishers == null || publishers.isEmpty()) {
publishers = clientConfiguration.option(SdkClientOption.METRIC_PUBLISHERS);
}
if (publishers == null) {
publishers = Collections.emptyList();
}
return publishers;
}
private HttpResponseHandler<AwsServiceException> createErrorResponseHandler(BaseAwsJsonProtocolFactory protocolFactory,
JsonOperationMetadata operationMetadata) {
return protocolFactory.createErrorResponseHandler(operationMetadata);
}
private void updateRetryStrategyClientConfiguration(SdkClientConfiguration.Builder configuration) {
ClientOverrideConfiguration.Builder builder = configuration.asOverrideConfigurationBuilder();
RetryMode retryMode = builder.retryMode();
if (retryMode != null) {
configuration.option(SdkClientOption.RETRY_STRATEGY, AwsRetryStrategy.forRetryMode(retryMode));
} else {
Consumer<RetryStrategy.Builder<?, ?>> configurator = builder.retryStrategyConfigurator();
if (configurator != null) {
RetryStrategy.Builder<?, ?> defaultBuilder = AwsRetryStrategy.defaultRetryStrategy().toBuilder();
configurator.accept(defaultBuilder);
configuration.option(SdkClientOption.RETRY_STRATEGY, defaultBuilder.build());
} else {
RetryStrategy retryStrategy = builder.retryStrategy();
if (retryStrategy != null) {
configuration.option(SdkClientOption.RETRY_STRATEGY, retryStrategy);
}
}
}
configuration.option(SdkClientOption.CONFIGURED_RETRY_MODE, null);
configuration.option(SdkClientOption.CONFIGURED_RETRY_STRATEGY, null);
configuration.option(SdkClientOption.CONFIGURED_RETRY_CONFIGURATOR, null);
}
private SdkClientConfiguration updateSdkClientConfiguration(SdkRequest request, SdkClientConfiguration clientConfiguration) {
List<SdkPlugin> plugins = request.overrideConfiguration().map(c -> c.plugins()).orElse(Collections.emptyList());
SdkClientConfiguration.Builder configuration = clientConfiguration.toBuilder();
if (plugins.isEmpty()) {
return configuration.build();
}
RedshiftDataServiceClientConfigurationBuilder serviceConfigBuilder = new RedshiftDataServiceClientConfigurationBuilder(
configuration);
for (SdkPlugin plugin : plugins) {
plugin.configureClient(serviceConfigBuilder);
}
updateRetryStrategyClientConfiguration(configuration);
return configuration.build();
}
private <T extends BaseAwsJsonProtocolFactory.Builder<T>> T init(T builder) {
return builder
.clientConfiguration(clientConfiguration)
.defaultServiceExceptionSupplier(RedshiftDataException::builder)
.protocol(AwsJsonProtocol.AWS_JSON)
.protocolVersion("1.1")
.registerModeledException(
ExceptionMetadata.builder().errorCode("ActiveStatementsExceededException")
.exceptionBuilderSupplier(ActiveStatementsExceededException::builder).httpStatusCode(400).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("ValidationException")
.exceptionBuilderSupplier(ValidationException::builder).httpStatusCode(400).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("DatabaseConnectionException")
.exceptionBuilderSupplier(DatabaseConnectionException::builder).httpStatusCode(500).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("ResourceNotFoundException")
.exceptionBuilderSupplier(ResourceNotFoundException::builder).httpStatusCode(400).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("InternalServerException")
.exceptionBuilderSupplier(InternalServerException::builder).httpStatusCode(500).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("ExecuteStatementException")
.exceptionBuilderSupplier(ExecuteStatementException::builder).httpStatusCode(500).build())
.registerModeledException(
ExceptionMetadata.builder().errorCode("BatchExecuteStatementException")
.exceptionBuilderSupplier(BatchExecuteStatementException::builder).httpStatusCode(500).build());
}
@Override
public final RedshiftDataServiceClientConfiguration serviceClientConfiguration() {
return new RedshiftDataServiceClientConfigurationBuilder(this.clientConfiguration.toBuilder()).build();
}
@Override
public void close() {
clientHandler.close();
}
}