// Source: org.frameworkset.tran.kafka.input.KafkaTranBatchConsumer2ndStore (bboss-datatran-kafka1x)
package org.frameworkset.tran.kafka.input;
/**
* Copyright 2008 biaoping.yin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import kafka.message.MessageAndMetadata;
import org.apache.kafka.common.serialization.Deserializer;
import org.frameworkset.plugin.kafka.KafkaBatchConsumer2ndStore;
import org.frameworkset.tran.BaseDataTran;
import org.frameworkset.tran.Record;
import org.frameworkset.tran.kafka.*;
import org.frameworkset.tran.kafka.codec.CodecObjectUtil;
import org.frameworkset.tran.plugin.kafka.input.Kafka1InputConfig;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.frameworkset.tran.plugin.kafka.input.KafkaInputConfig.CODEC_JSON;
import static org.frameworkset.tran.plugin.kafka.input.KafkaInputConfig.CODEC_TEXT;
/**
 * Description: Batch consumer that deserializes consumed Kafka messages and
 * appends them to a data-tran pipeline as the second store.
 *
 * Copyright (c) 2018
 * @Date 2019/9/28 10:41
 * @author biaoping.yin
 * @version 1.0
 */
public class KafkaTranBatchConsumer2ndStore extends KafkaBatchConsumer2ndStore {
// Input-plugin configuration supplying codec settings for keys and values.
private Kafka1InputConfig kafka1InputConfig;
// Deserializer applied to message values; falls back to the JSON codec when none is configured (see constructor).
private Deserializer valueDeserializer;
// Deserializer applied to message keys; falls back to the text codec when none is configured (see constructor).
private Deserializer keyDeserializer;
/**
 * Builds the consumer, wiring the target data tran and resolving the key/value
 * deserializers from the input configuration.
 *
 * @param asynESOutPutDataTran pipeline that consumed records are appended to
 * @param kafka1InputConfig    input configuration holding the codec choices
 */
public KafkaTranBatchConsumer2ndStore(BaseDataTran asynESOutPutDataTran, Kafka1InputConfig kafka1InputConfig) {
    this.asynESOutPutDataTran = asynESOutPutDataTran;
    this.kafka1InputConfig = kafka1InputConfig;
    // Unconfigured codecs fall back to defaults: JSON for values, text for keys.
    if (kafka1InputConfig.getValueCodec() == null) {
        valueDeserializer = CodecObjectUtil.getDeserializer(CODEC_JSON);
    } else {
        valueDeserializer = CodecObjectUtil.getDeserializer(kafka1InputConfig.getValueCodec());
    }
    if (kafka1InputConfig.getKeyCodec() == null) {
        keyDeserializer = CodecObjectUtil.getDeserializer(CODEC_TEXT);
    } else {
        keyDeserializer = CodecObjectUtil.getDeserializer(kafka1InputConfig.getKeyCodec());
    }
}
private BaseDataTran asynESOutPutDataTran;
@Override
public void store(List> messages) throws Exception {
List records = parserData(messages);
asynESOutPutDataTran.appendData(new KafkaData(records));
}
@Override
public void store(MessageAndMetadata message) throws Exception {
List> messages = new ArrayList>();
messages.add(message);
store(messages);
}
private void deserializeData(MessageAndMetadata consumerRecord, List results){
Object value = valueDeserializer.deserialize(consumerRecord.topic(),consumerRecord.message());
Object key = keyDeserializer.deserialize(consumerRecord.topic(),consumerRecord.key());
if (value instanceof List) {
List rs = (List) value;
for (int i = 0; i < rs.size(); i++) {
Object v = rs.get(i);
if (v instanceof Map) {
results.add(new KafkaMapRecord(asynESOutPutDataTran.getTaskContext(),key, (Map) v,consumerRecord.offset()));
} else {
results.add(new KafkaStringRecord(asynESOutPutDataTran.getTaskContext(),key, (String) v,consumerRecord.offset()));
}
}
//return new KafkaMapRecord((ConsumerRecord