org.apache.hadoop.dynamodb.DynamoDBConstants (emr-dynamodb-hadoop: EMR DynamoDB Hadoop Connector)
/**
* Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
* except in compliance with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "LICENSE.TXT" file accompanying this file. This file is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under the License.
*/
package org.apache.hadoop.dynamodb;

import com.amazonaws.regions.Regions;

/**
* Contains constants used for the Hadoop to DynamoDB connection. Note that many of these string
* literals are specifically chosen to allow for backward compatibility with Hive and should not be
* changed.
*/
public interface DynamoDBConstants {
// Credentials
String DYNAMODB_ACCESS_KEY_CONF = "dynamodb.awsAccessKeyId";
String DYNAMODB_SECRET_KEY_CONF = "dynamodb.awsSecretAccessKey";
String DEFAULT_ACCESS_KEY_CONF = "fs.s3.awsAccessKeyId";
String DEFAULT_SECRET_KEY_CONF = "fs.s3.awsSecretAccessKey";
String CUSTOM_CREDENTIALS_PROVIDER_CONF = "dynamodb.customAWSCredentialsProvider";
// Table constants
String DYNAMODB_COLUMN_MAPPING = "dynamodb.column.mapping";
// JobConf constants
String DYNAMODB_FILTER_PUSHDOWN = "dynamodb.filter.pushdown";
String ENDPOINT = "dynamodb.endpoint";
String REGION_ID = "dynamodb.regionid";
String REGION = "dynamodb.region";
String PROXY_HOST = "dynamodb.proxy.hostname";
String PROXY_PORT = "dynamodb.proxy.port";
String PROXY_USERNAME = "dynamodb.proxy.username";
String PROXY_PASSWORD = "dynamodb.proxy.password";
// The TABLE_NAME constant is here for backwards compatibility with Hive
String TABLE_NAME = "dynamodb.table.name";
String OUTPUT_TABLE_NAME = "dynamodb.output.tableName";
String INPUT_TABLE_NAME = "dynamodb.input.tableName";
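// Fraction of the table's provisioned write/read capacity the job is allowed to consume.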
String THROUGHPUT_WRITE_PERCENT = "dynamodb.throughput.write.percent";
String THROUGHPUT_READ_PERCENT = "dynamodb.throughput.read.percent";
String READ_THROUGHPUT = "dynamodb.throughput.read";
String WRITE_THROUGHPUT = "dynamodb.throughput.write";
String AVG_ITEM_SIZE = "dynamodb.item.average.size";
String ITEM_COUNT = "dynamodb.item.count";
String TABLE_SIZE_BYTES = "dynamodb.table.size-bytes";
String MAX_MAP_TASKS = "dynamodb.max.map.tasks";
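// Default fraction of provisioned throughput to consume when no percent is configured.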
String DEFAULT_THROUGHPUT_PERCENTAGE = "0.5";
String MAX_ITEM_SIZE = "dynamodb.max.item.size";
String MAX_BATCH_SIZE = "dynamodb.max.batch.size";
String MAX_ITEMS_PER_BATCH = "dynamodb.max.batch.items";
// http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html
long DEFAULT_MAX_ITEM_SIZE = 400 * 1024;
long DEFAULT_MAX_BATCH_SIZE = 16 * 1024 * 1024;
long DEFAULT_MAX_ITEMS_PER_BATCH = 25;
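// An eventually consistent read consumes half the read capacity of a strongly consistent read.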
double READ_EVENTUALLY_TO_STRONGLY_CONSISTENT_FACTOR = 2;
String SCAN_SEGMENTS = "dynamodb.scan.segments";
int MAX_SCAN_SEGMENTS = 1000000;
int MIN_SCAN_SEGMENTS = 1;
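// One read capacity unit covers a strongly consistent read of up to 4 KB; one write capacity
// unit covers a write of up to 1 KB.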
double BYTES_PER_READ_CAPACITY_UNIT = 4096;
double BYTES_PER_WRITE_CAPACITY_UNIT = 1024;
long MAX_BYTES_PER_SEGMENT = 1024L * 1024L * 1024L;
double MIN_IO_PER_SEGMENT = 100.0;
int PSCAN_SEGMENT_BATCH_SIZE = 50;
int PSCAN_MULTIPLEXER_CAPACITY = 600;
int RATE_CONTROLLER_WINDOW_SIZE_SEC = 5;
String EXPORT_FORMAT_VERSION = "dynamodb.export.format.version";
String DEFAULT_AWS_REGION = Regions.US_EAST_1.getName();
int DEFAULT_AVERAGE_ITEM_SIZE_IN_BYTES = 100;
}
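As a usage illustration, the sketch below sets a few of these keys on a Hadoop Configuration before submitting a job that reads through the connector. It is a minimal sketch: the class name, table name, and region value are placeholders, and the rest of the job (input format, mapper, and so on) is assumed to be wired up elsewhere.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.dynamodb.DynamoDBConstants;

public class DynamoDBJobConfigExample {
  public static Configuration configure() {
    Configuration conf = new Configuration();
    // Table and region to read from; both values are placeholders.
    conf.set(DynamoDBConstants.INPUT_TABLE_NAME, "MyInputTable");
    conf.set(DynamoDBConstants.REGION, "us-west-2");
    // Optionally point at a non-default endpoint (e.g. DynamoDB Local).
    // conf.set(DynamoDBConstants.ENDPOINT, "http://localhost:8000");
    // Cap the job at 40% of the table's provisioned read capacity instead of
    // the 50% implied by DEFAULT_THROUGHPUT_PERCENTAGE.
    conf.set(DynamoDBConstants.THROUGHPUT_READ_PERCENT, "0.4");
    return conf;
  }
}

Note that DEFAULT_ACCESS_KEY_CONF and DEFAULT_SECRET_KEY_CONF reuse the fs.s3 credential keys, so a cluster already configured with static S3 credentials typically needs no separate DynamoDB credential settings; otherwise the dynamodb.* credential keys or a custom credentials provider can be supplied.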