/*
 * com.aliyun.datahub.client.model.SinkOdpsConfig — Maven / Gradle / Ivy artifact (newest version).
 */
package com.aliyun.datahub.client.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Configuration for a DataHub connector that sinks topic data into a
 * MaxCompute (ODPS) table.
 *
 * <p>Plain mutable config bean: all properties are set via setters and read by
 * the connector/serializer. Not thread-safe.
 */
public class SinkOdpsConfig extends SinkConfig {
    /**
     * How to divide data into different MaxCompute partitions.
     */
    public enum PartitionMode {
        /**
         * Partitioned by user defined columns.
         */
        USER_DEFINE,
        /**
         * Partitioned by the time a record is written into DataHub.
         */
        SYSTEM_TIME,
        /**
         * The topic should have a column named 'event_time TIMESTAMP', and data is
         * partitioned by this column's value.
         */
        EVENT_TIME,
        /**
         * Partitioned by time taken from record attributes.
         */
        META_TIME,
    }

    /**
     * MaxCompute project name.
     */
    private String project;

    /**
     * MaxCompute table name.
     */
    private String table;

    /**
     * The endpoint of the MaxCompute service.
     */
    private String endpoint;

    /**
     * The endpoint of the Tunnel service which is bound with MaxCompute.
     * If not specified, DataHub will get the tunnel endpoint from the MaxCompute project config.
     */
    private String tunnelEndpoint;

    /**
     * The accessId used to visit MaxCompute. It must have permission to access the
     * corresponding MaxCompute table.
     */
    private String accessId;

    /**
     * The accessKey used to visit MaxCompute.
     */
    private String accessKey;

    /**
     * Specifies how to partition data into the corresponding MaxCompute partition.
     * {@link PartitionMode}
     */
    private PartitionMode partitionMode;

    /**
     * Specifies the column used as the event time.
     */
    private String eventTimeCol;

    /**
     * How long a MaxCompute partition stores data. Used in SYSTEM_TIME and EVENT_TIME mode.
     *
     * Unit: minute.
     */
    private int timeRange;

    /**
     * Timezone for the ODPS table, used to compute partitions.
     * If not set, the ODPS project config or the default (Asia/Shanghai) is used.
     */
    private String timeZone;

    /**
     * Specifies how to compute the MaxCompute partition from the specified column value.
     */
    private PartitionConfig partitionConfig;

    /**
     * Partition field (column names) for USER_DEFINE mode.
     */
    private List<String> partitionField;

    /**
     * Specifies the splitKey if blob data should be split into single lines before
     * sinking to ODPS. Only for BLOB topics.
     */
    private String splitKey;

    /**
     * Whether to base64-encode data before sinking into ODPS. Only for BLOB topics.
     * Usually users should not set this parameter.
     */
    private Boolean base64Encode;

    /**
     * Binary parser config.
     */
    private BinaryParserConfig binaryParserConfig = null;

    /**
     * Not used by normal users.
     */
    private InternalConfig internalConfig = new InternalConfig();

    public String getProject() {
        return project;
    }

    public void setProject(String project) {
        this.project = project;
    }

    public String getTable() {
        return table;
    }

    public void setTable(String table) {
        this.table = table;
    }

    public String getEndpoint() {
        return endpoint;
    }

    public void setEndpoint(String endpoint) {
        this.endpoint = endpoint;
    }

    public String getTunnelEndpoint() {
        return tunnelEndpoint;
    }

    public void setTunnelEndpoint(String tunnelEndpoint) {
        this.tunnelEndpoint = tunnelEndpoint;
    }

    public String getAccessId() {
        return accessId;
    }

    public void setAccessId(String accessId) {
        this.accessId = accessId;
    }

    public String getAccessKey() {
        return accessKey;
    }

    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public PartitionMode getPartitionMode() {
        return partitionMode;
    }

    public void setPartitionMode(PartitionMode partitionMode) {
        this.partitionMode = partitionMode;
    }

    public void setEventTimeCol(String eventTimeCol) {
        this.eventTimeCol = eventTimeCol;
    }

    public String getEventTimeCol() {
        return eventTimeCol;
    }

    public int getTimeRange() {
        return timeRange;
    }

    public void setTimeRange(int timeRange) {
        this.timeRange = timeRange;
    }

    public PartitionConfig getPartitionConfig() {
        return partitionConfig;
    }

    public void setPartitionConfig(PartitionConfig partitionConfig) {
        this.partitionConfig = partitionConfig;
    }

    public String getSplitKey() {
        return splitKey;
    }

    public void setSplitKey(String splitKey) {
        this.splitKey = splitKey;
    }

    public String getTimeZone() {
        return timeZone;
    }

    public void setTimeZone(String timeZone) {
        this.timeZone = timeZone;
    }

    public InternalConfig getInternalConfig() {
        return internalConfig;
    }

    public void setInternalConfig(InternalConfig internalConfig) {
        this.internalConfig = internalConfig;
    }

    public Boolean getBase64Encode() {
        return base64Encode;
    }

    public void setBase64Encode(Boolean base64Encode) {
        this.base64Encode = base64Encode;
    }

    public BinaryParserConfig getBinaryParserConfig() {
        return binaryParserConfig;
    }

    public void setBinaryParserConfig(BinaryParserConfig binaryParserConfig) {
        this.binaryParserConfig = binaryParserConfig;
    }

    @JsonIgnore
    public List<String> getPartitionField() {
        return partitionField;
    }

    public void setPartitionField(List<String> partitionField) {
        this.partitionField = partitionField;
    }

    /**
     * Mapping from partition key to the format/value used to compute the
     * MaxCompute partition. Insertion order is preserved ({@link LinkedHashMap}),
     * since partition specs are order-sensitive.
     */
    public static class PartitionConfig {
        private Map<String, String> configMap = new LinkedHashMap<>();

        /**
         * Adds one partition key/value entry, overwriting any previous value for the key.
         */
        public void addConfig(String key, String value) {
            configMap.put(key, value);
        }

        public Map<String, String> getConfigMap() {
            return configMap;
        }
    }

    /**
     * Internal settings; not used by normal users.
     */
    public static class InternalConfig {
        /**
         * Internal use.
         */
        private String subId;

        /**
         * Internal use for compatibility. Normal users should not use it.
         */
        private Boolean addTtHostLine;

        /**
         * SinkOdps uses compressed data; used for TT.
         */
        private Boolean sinkCompressData;

        /**
         * Whether aplus data type; used for TT.
         */
        private Boolean aplusDataType;

        public String getSubId() {
            return subId;
        }

        public void setSubId(String subId) {
            this.subId = subId;
        }

        public Boolean getAddTtHostLine() {
            return addTtHostLine;
        }

        public void setAddTtHostLine(Boolean addTtHostLine) {
            this.addTtHostLine = addTtHostLine;
        }

        public Boolean getSinkCompressData() {
            return sinkCompressData;
        }

        public void setSinkCompressData(Boolean sinkCompressData) {
            this.sinkCompressData = sinkCompressData;
        }

        public Boolean getAplusDataType() {
            return aplusDataType;
        }

        public void setAplusDataType(Boolean aplusDataType) {
            this.aplusDataType = aplusDataType;
        }
    }
}