/**
* This software is licensed to you under the Apache License, Version 2.0 (the
* "Apache License").
*
* LinkedIn's contributions are made under the Apache License. If you contribute
* to the Software, the contributions will be deemed to have been made under the
* Apache License, unless you expressly indicate otherwise. Please do not make any
* contributions that would be inconsistent with the Apache License.
*
* You may obtain a copy of the Apache License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, this software
* distributed under the Apache License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Apache
* License for the specific language governing permissions and limitations for the
* software governed under the Apache License.
*
* © 2012 LinkedIn Corp. All Rights Reserved.
*/
package com.senseidb.gateway.kafka;
import java.nio.ByteBuffer;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaMessageStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.Message;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import proj.zoie.api.DataConsumer.DataEvent;
import proj.zoie.impl.indexing.StreamDataProvider;
import com.senseidb.indexing.DataSourceFilter;
public class KafkaStreamDataProvider extends StreamDataProvider<JSONObject> {
private final String _topic;
private final String _consumerGroupId;
private Properties _kafkaConfig;
private ConsumerConnector _consumerConnector;
private ConsumerIterator<Message> _consumerIterator;
private static final Logger logger = Logger.getLogger(KafkaStreamDataProvider.class);
private final String _zookeeperUrl;
private final int _kafkaSoTimeout;
private volatile boolean _started = false;
private final DataSourceFilter<DataPacket> _dataConverter;
public KafkaStreamDataProvider(Comparator<String> versionComparator, String zookeeperUrl, int soTimeout, int batchSize,
                               String consumerGroupId, String topic, long startingOffset, DataSourceFilter<DataPacket> dataConverter) {
this(versionComparator, zookeeperUrl, soTimeout, batchSize, consumerGroupId, topic, startingOffset, dataConverter, null);
}
public KafkaStreamDataProvider(Comparator<String> versionComparator, String zookeeperUrl, int soTimeout, int batchSize,
                               String consumerGroupId, String topic, long startingOffset, DataSourceFilter<DataPacket> dataConverter, Properties kafkaConfig) {
super(versionComparator);
_consumerGroupId = consumerGroupId;
_topic = topic;
super.setBatchSize(batchSize);
_zookeeperUrl = zookeeperUrl;
_kafkaSoTimeout = soTimeout;
_consumerConnector = null;
_consumerIterator = null;
// Default to an empty config before assigning, so start() can iterate its properties safely.
if (kafkaConfig == null) {
  kafkaConfig = new Properties();
}
_kafkaConfig = kafkaConfig;
_dataConverter = dataConverter;
if (_dataConverter == null){
throw new IllegalArgumentException("kafka data converter is null");
}
}
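/**
 * No-op: the ZooKeeper-based high-level consumer tracks offsets per consumer
 * group in ZooKeeper, so no explicit starting offset is applied here (the
 * startingOffset constructor argument is likewise unused).
 */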
@Override
public void setStartingOffset(String version){
}
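/**
 * Fetches the next Kafka message, converts its payload to a JSONObject via
 * the DataSourceFilter, and versions it with the current wall-clock time.
 * Returns null when the provider is not started, when the consumer times
 * out waiting for a message, or when conversion fails.
 */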
@Override
public DataEvent<JSONObject> next() {
if (!_started) return null;
try
{
if (!_consumerIterator.hasNext())
return null;
}
catch (Exception e)
{
// Most likely a consumer timeout while waiting for a message - safe to ignore and retry on the next call
return null;
}
Message msg = _consumerIterator.next();
if (logger.isDebugEnabled()){
logger.debug("got new message: "+msg);
}
long version = System.currentTimeMillis();
JSONObject data;
try {
int size = msg.payloadSize();
ByteBuffer byteBuffer = msg.payload();
byte[] bytes = new byte[size];
byteBuffer.get(bytes,0,size);
data = _dataConverter.filter(new DataPacket(bytes,0,size));
if (logger.isDebugEnabled()){
logger.debug("message converted: "+data);
}
return new DataEvent<JSONObject>(data, String.valueOf(version));
} catch (Exception e) {
logger.error(e.getMessage(),e);
return null;
}
}
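// No-op: on restart the consumer resumes from the group's committed offsets.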
@Override
public void reset() {
}
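/**
 * Connects a high-level consumer (Kafka 0.7-era properties "zk.connect" and
 * "groupid"), opens a single stream for the configured topic, and starts
 * iterating it. User-supplied kafkaConfig entries are copied in last, so
 * they override the defaults set here.
 */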
@Override
public void start() {
Properties props = new Properties();
props.put("zk.connect", _zookeeperUrl);
//props.put("consumer.timeout.ms", _kafkaSoTimeout);
props.put("groupid", _consumerGroupId);
for (String key : _kafkaConfig.stringPropertyNames()) {
props.put(key, _kafkaConfig.getProperty(key));
}
ConsumerConfig consumerConfig = new ConsumerConfig(props);
_consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put(_topic, 1);
Map<String, List<KafkaMessageStream<Message>>> topicMessageStreams =
    _consumerConnector.createMessageStreams(topicCountMap);
List<KafkaMessageStream<Message>> streams = topicMessageStreams.get(_topic);
KafkaMessageStream<Message> kafkaMessageStream = streams.iterator().next();
_consumerIterator = kafkaMessageStream.iterator();
super.start();
_started = true;
}
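/**
 * Stops consuming and shuts down the consumer connector; super.stop() runs
 * in a finally block so the provider thread is halted even if shutdown fails.
 */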
@Override
public void stop() {
_started = false;
try
{
if (_consumerConnector!=null){
_consumerConnector.shutdown();
}
}
finally
{
super.stop();
}
}
}
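For reference, a minimal wiring sketch (not part of the original file): the ZooKeeper address, group id, topic, and timing values below are placeholders, and the DataSourceFilter instance is assumed to be supplied by the caller. It assumes the same package as the provider so that DataPacket is visible.

import java.util.Comparator;
import com.senseidb.indexing.DataSourceFilter;

public class KafkaProviderExample {
  // Builds and starts a provider; all connection values are placeholders.
  public static KafkaStreamDataProvider wire(DataSourceFilter<DataPacket> filter) {
    // Versions are millisecond timestamps rendered as strings (see next()),
    // so a numeric comparison suffices.
    Comparator<String> versionComparator = new Comparator<String>() {
      public int compare(String a, String b) {
        return Long.valueOf(a).compareTo(Long.valueOf(b));
      }
    };
    KafkaStreamDataProvider provider = new KafkaStreamDataProvider(
        versionComparator,
        "localhost:2181", // ZooKeeper connect string (placeholder)
        10000,            // socket timeout, ms
        100,              // batch size
        "sensei-group",   // consumer group id (placeholder)
        "sensei-topic",   // topic (placeholder)
        0L,               // starting offset (unused by this provider)
        filter);
    provider.start();     // connects the consumer and begins streaming
    return provider;
  }
}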