
com.intellifylearning.flush.Flusher
IntelliSense Sensor API for Java
package com.intellifylearning.flush;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.intellify.api.caliper.CaliperEntity;
import com.intellify.api.caliper.impl.IntellifyBase;
import com.intellifylearning.Client;
import com.intellifylearning.Constants;
import com.intellifylearning.IntellifyQueueFullException;
import com.intellifylearning.models.Batch;
import com.intellifylearning.models.BatchIntellifyBase;
import com.intellifylearning.request.IRequester;
import com.intellifylearning.utils.ManualResetEvent;
public class Flusher extends Thread {

    private static final Logger logger = LoggerFactory
            .getLogger(Constants.LOGGER);

    private final LinkedBlockingQueue<CaliperEntity> queue;
    private final LinkedBlockingQueue<IntellifyBase> queue2;
    private final ManualResetEvent idle;
    private final ManualResetEvent idle2;
    private final Client client;
    private final IBatchFactory factory;
    private final IRequester requester;
    private final boolean errorOnQueueFull;
    private final int queueWaitTimeout;
    private boolean go;

    public Flusher(Client client, IBatchFactory factory, IRequester requester) {
        this.client = client;
        this.factory = factory;
        this.requester = requester;
        int maxQueueSize = client.getOptions().getMaxQueueSize();
        this.queue = new LinkedBlockingQueue<CaliperEntity>(maxQueueSize);
        this.queue2 = new LinkedBlockingQueue<IntellifyBase>(maxQueueSize);
        this.errorOnQueueFull = client.getOptions().isErrorOnQueueFull();
        this.queueWaitTimeout = client.getOptions().getQueueWaitTimeout();
        this.go = true;
        this.idle = new ManualResetEvent(true);
        this.idle2 = new ManualResetEvent(true);
    }

    @Override
    public void run() {
        while (go) {
            batchAndSend();
            batchAndSendIntellifyBaseObjects();
            try {
                // thread context switch to avoid resource contention
                Thread.sleep(0);
            } catch (InterruptedException e) {
                logger.error("Interrupted while sleeping flushing thread.", e);
            }
        }
    }

    /**
     * Batch and send v1 style events.
     */
    private void batchAndSend() {
        List<CaliperEntity> current = new LinkedList<CaliperEntity>();
        int batchSize = getBatchSize();
        do {
            if (queue.size() == 0) {
                idle.set();
            }
            CaliperEntity caliperEntity = null;
            try {
                // wait half a second for an item to appear,
                // otherwise yield to confirm that we aren't restarting
                caliperEntity = queue.poll(500, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                logger.error(
                        "Interrupted while trying to flush intellisense queue.",
                        e);
            }
            if (caliperEntity != null) {
                // we are no longer idle since there are messages to be
                // processed
                idle.reset();
                current.add(caliperEntity);
                client.getStatistics().updateQueued(this.queue.size());
            }
        }
        // keep iterating and collecting the current batch while we're
        // active, there's something in the queue, and we haven't already
        // over-filled this batch
        while (go && queue.size() > 0 && current.size() < batchSize);

        if (current.size() > 0) {
            // we have something to send in this batch
            logger.debug("Preparing to send batch.. [ " + current.size()
                    + " items]");
            Batch batch = factory.create(current);
            client.getStatistics().updateFlushAttempts(1);
            requester.send(batch);
            logger.debug("Initiated batch request .. [ " + current.size()
                    + " items]");
        }
    }

    /**
     * Batch and send Caliper beta style events.
     */
    private void batchAndSendIntellifyBaseObjects() {
        List<IntellifyBase> current = new LinkedList<IntellifyBase>();
        int batchSize = getBatchSize();
        do {
            if (queue2.size() == 0) {
                idle2.set();
            }
            IntellifyBase intellifyBaseEntity = null;
            try {
                // wait half a second for an item to appear,
                // otherwise yield to confirm that we aren't restarting
                intellifyBaseEntity = queue2.poll(500, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                logger.error(
                        "Interrupted while trying to flush intellisense queue for IntellifyBase objects",
                        e);
            }
            if (intellifyBaseEntity != null) {
                // we are no longer idle since there are messages to be
                // processed
                idle2.reset();
                current.add(intellifyBaseEntity);
                client.getStatistics().updateQueued(this.queue2.size());
            }
        }
        // keep iterating and collecting the current batch while we're
        // active, there's something in the queue, and we haven't already
        // over-filled this batch
        while (go && queue2.size() > 0 && current.size() < batchSize);

        if (current.size() > 0) {
            // we have something to send in this batch
            logger.debug("Preparing to send batch of IntellifyBase objects.. [ "
                    + current.size() + " items]");
            BatchIntellifyBase batch = factory
                    .createIntellifyBaseBatch(current);
            client.getStatistics().updateFlushAttempts(1);
            requester.sendIntellifyBaseBatch(batch);
            logger.debug("Initiated batch of IntellifyBase objects request for [ "
                    + current.size() + " items]");
        }
    }

    private int getBatchSize() {
        int batchSize = Constants.BATCH_INCREMENT;
        if (this.client.getOptions().getBatchSize() > 0) {
            batchSize = this.client.getOptions().getBatchSize();
        }
        return batchSize;
    }

    public void enqueue(CaliperEntity payload) {
        enqueue(queue, payload);
    }

    /**
     * Enqueue EntityData for the Jan 2015 implementation using Caliper beta
     * as base.
     *
     * @param intellifyBaseObj data to send
     */
    public void enqueue(IntellifyBase intellifyBaseObj) {
        enqueue(queue2, intellifyBaseObj);
    }

    private <T> void enqueue(LinkedBlockingQueue<T> q, T obj) {
        try {
            boolean success;
            if (errorOnQueueFull) {
                success = q.offer(obj);
            } else {
                success = q.offer(obj, queueWaitTimeout, TimeUnit.MILLISECONDS);
            }
            if (!success) {
                throw new IntellifyQueueFullException(
                        "Exceeded wait timeout of " + (errorOnQueueFull ? 0 : queueWaitTimeout)
                                + "ms for intellify queue with capacity: " + client.getOptions().getMaxQueueSize());
            }
            client.getStatistics().updateInserted(1);
            client.getStatistics().updateQueued(q.size());
        } catch (InterruptedException e) {
            throw new IntellifyQueueFullException("Interrupted waiting on full intellify queue with capacity: "
                    + client.getOptions().getMaxQueueSize());
        }
    }

    public void flush() {
        try {
            idle.waitOne();
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting for the thread to flush.",
                    e);
        }
    }

    public void close() {
        go = false;
        queue.clear();
    }
}
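
For reference, a minimal usage sketch of the class above. It assumes the Client, IBatchFactory and IRequester instances are created elsewhere by the sensor setup (their construction is not part of this file), and that IBatchFactory lives alongside Flusher in com.intellifylearning.flush, since the source above does not import it. Only the public Flusher API shown above is exercised; the sketch class name and method are hypothetical.

import com.intellify.api.caliper.CaliperEntity;
import com.intellifylearning.Client;
import com.intellifylearning.flush.Flusher;
import com.intellifylearning.flush.IBatchFactory;
import com.intellifylearning.request.IRequester;

public class FlusherUsageSketch {

    // client, factory and requester are assumed to be provided by the
    // surrounding sensor setup; this sketch only drives the Flusher itself.
    public static void sendOne(Client client, IBatchFactory factory,
            IRequester requester, CaliperEntity event) {
        Flusher flusher = new Flusher(client, factory, requester);
        flusher.start();        // Flusher extends Thread; run() drains both queues in batches

        flusher.enqueue(event); // may throw IntellifyQueueFullException when the
                                // queue is full or the offer times out

        flusher.flush();        // blocks until the v1 queue's idle event is set
        flusher.close();        // stops the background loop and clears the v1 queue
    }
}

Note that, as written, flush() waits only on the idle event of the v1 queue and close() clears only the v1 queue; callers enqueueing IntellifyBase objects should keep that in mind.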