package net.sf.jrtps.udds;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.CopyOnWriteArrayList;
import net.sf.jrtps.Marshaller;
import net.sf.jrtps.OutOfResources;
import net.sf.jrtps.QualityOfService;
import net.sf.jrtps.QualityOfService.PolicyListener;
import net.sf.jrtps.builtin.DiscoveredData;
import net.sf.jrtps.message.parameter.CoherentSet;
import net.sf.jrtps.message.parameter.KeyHash;
import net.sf.jrtps.message.parameter.QosDeadline;
import net.sf.jrtps.message.parameter.QosPolicy;
import net.sf.jrtps.message.parameter.QosResourceLimits;
import net.sf.jrtps.rtps.ChangeKind;
import net.sf.jrtps.rtps.Sample;
import net.sf.jrtps.types.Duration;
import net.sf.jrtps.types.EntityId;
import net.sf.jrtps.types.SequenceNumber;
import net.sf.jrtps.util.Watchdog;
import net.sf.jrtps.util.Watchdog.Listener;
import net.sf.jrtps.util.Watchdog.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* HistoryCache holds Samples of entities. For writers, it is used to keep a
* history of changes so that late-joining readers are able to get the historical data.
*
* Samples on the reader side are made available through HistoryCache.
*
* @param <T> type of the samples managed by this HistoryCache
* @param <ENTITY_DATA> type of the discovered entity data handled by the communication listeners attached to this history cache.
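*
* <p>For illustration only, a writer-side cache might be driven roughly as follows.
* The {@code HelloMessage} payload type and the local variable names are hypothetical;
* in udds the cache is created and wired internally by the DataWriter/DataReader entities.
* <pre>{@code
* UDDSHistoryCache<HelloMessage, PublicationData> cache = ...;
* cache.write(new HelloMessage("hello"), System.currentTimeMillis());
* cache.dispose(new HelloMessage("hello"), System.currentTimeMillis());
* LinkedList<Sample<HelloMessage>> unread = cache.getSamplesSince(0);
* }</pre>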
*/
class UDDSHistoryCache<T, ENTITY_DATA extends DiscoveredData> implements HistoryCache<T> {
private static final Logger logger = LoggerFactory.getLogger(UDDSHistoryCache.class);
// QoS policies affecting writer cache
private final QosResourceLimits resource_limits;
protected final List<SampleListener<T>> listeners = new CopyOnWriteArrayList<>();
private volatile CoherentSet coherentSet; // Current CoherentSet, used by writer
// Main collection to hold instances. ResourceLimits is checked against this map
private final Map<KeyHash, Instance<T>> instances = new LinkedHashMap<>();
private long deadLinePeriod = -1; // -1 represents INFINITE
// An ordered set of cache changes.
protected final SortedSet<Sample<T>> samples = Collections.synchronizedSortedSet(new TreeSet<>(
new Comparator<Sample<T>>() {
@Override
public int compare(Sample<T> o1, Sample<T> o2) {
return (int) (o1.getSequenceNumber() - o2.getSequenceNumber());
}
}));
protected final Marshaller<T> marshaller;
protected volatile long seqNum; // sequence number of a Sample
protected final EntityId entityId;
protected final Watchdog watchdog;
private List<CommunicationListener<ENTITY_DATA>> communicationListeners;
private final QualityOfService qos;
UDDSHistoryCache(EntityId eId, Marshaller<T> marshaller, QualityOfService qos, Watchdog watchdog, boolean isReaderCache) {
this.entityId = eId;
this.marshaller = marshaller;
this.qos = qos;
this.watchdog = watchdog;
resource_limits = qos.getResourceLimits();
setDeadlinePeriod(qos.getDeadline());
qos.addPolicyListener(new PolicyListener() {
@Override
public void policyChanged(QosPolicy policy) {
if (policy instanceof QosDeadline) {
setDeadlinePeriod((QosDeadline) policy);
}
}
});
}
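// Reads the deadline period from QoS; an INFINITE period leaves the previous value
// in place (initially -1, meaning deadline monitoring is disabled for new instances).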
private void setDeadlinePeriod(QosDeadline dl) {
Duration period = dl.getPeriod();
if (!Duration.INFINITE.equals(period)) {
this.deadLinePeriod = period.asMillis();
logger.debug("deadline period was set to {}", deadLinePeriod);
}
}
/**
* Dispose a sample.
* @param sample
* @param timestamp
*/
@Override
public void dispose(T sample, long timestamp) {
addSample(new Sample<T>(null, marshaller, ++seqNum, timestamp, ChangeKind.DISPOSE, sample));
}
/**
* Unregisters an instance.
* @param sample
* @param timestamp
*/
@Override
public void unregister(T sample, long timestamp) {
addSample(new Sample<T>(null, marshaller, ++seqNum, timestamp, ChangeKind.UNREGISTER, sample));
}
/**
* Writes a sample.
* @param sample
* @param timestamp
*/
@Override
public void write(T sample, long timestamp) {
addSample(new Sample<T>(null, marshaller, ++seqNum, timestamp, ChangeKind.WRITE, sample));
}
/**
* Registers an instance
* @param sample
* @param timestamp
* @return an Instance
*/
@Override
public Instance<T> register(T sample, long timestamp) {
if (marshaller.hasKey()) {
return getOrCreateInstance(new KeyHash(marshaller.extractKey(sample)));
}
return null;
}
void addListener(SampleListener<T> aListener) {
listeners.add(aListener);
}
void removeListener(SampleListener<T> aListener) {
listeners.remove(aListener);
}
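// Shared add path for write/dispose/unregister: rejects samples older than the instance's
// latest timestamp, applies TIME_BASED_FILTER, removes the instance on DISPOSE/UNREGISTER,
// and lets the instance drop an old sample when RESOURCE_LIMITS max_samples has been reached.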
protected Sample<T> addSample(Sample<T> sample) {
logger.trace("addSample({})", sample);
KeyHash key = sample.getKey();
ChangeKind kind = sample.getKind();
sample.setCoherentSet(coherentSet); // Set the CoherentSet attribute, if it exists
Instance<T> inst = null;
try {
inst = getOrCreateInstance(key);
Sample<T> latest = inst.getLatest();
if (latest != null && latest.getTimestamp() > sample.getTimestamp()) {
logger.debug("Rejecting sample, since its timestamp {} is less than instances latest timestamp {}",
sample.getTimestamp(), latest.getTimestamp());
return null;
}
if (inst.applyTimeBasedFilter(this, sample)) { // Check, if TIME_BASED_FILTER applies
return null;
}
if (kind == ChangeKind.DISPOSE || kind == ChangeKind.UNREGISTER) {
Instance<T> removedInstance = instances.remove(key);
if (removedInstance != null) {
removedInstance.dispose(); // cancels deadline monitor
}
}
else {
logger.trace("[{}] Creating sample {}", entityId, seqNum + 1);
Sample<T> removedSample =
inst.addSample(sample, samples.size() == resource_limits.getMaxSamples());
if (removedSample != null) {
synchronized (samples) {
samples.remove(removedSample);
}
}
}
synchronized (samples) {
samples.add(sample);
}
return sample;
}
catch(OutOfResources oor) {
logger.debug("Got OutOfResources: {}", oor.getMessage());
throw oor;
}
}
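// Returns the instance for the given key, creating it if needed. Creation enforces
// RESOURCE_LIMITS max_instances and, when a finite deadline period is configured,
// registers a watchdog task that notifies communication listeners of missed deadlines.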
protected Instance<T> getOrCreateInstance(final KeyHash key) {
Instance<T> inst = instances.get(key);
if (inst != null) {
return inst;
}
logger.trace("[{}] Creating new instance {}", entityId, key);
if (resource_limits.getMaxInstances() != -1 &&
instances.size() == resource_limits.getMaxInstances()) {
throw new OutOfResources(OutOfResources.Kind.MAX_INSTANCES_EXCEEDED,
resource_limits.getMaxInstances());
}
final Instance<T> newInst = new Instance<T>(key, qos, watchdog);
if (deadLinePeriod != -1) {
Task wdTask = watchdog.addTask(deadLinePeriod, new Listener() {
@Override
public void triggerTimeMissed() {
logger.debug("deadline missed for {}", key);
newInst.deadlineMissed();
for (CommunicationListener<ENTITY_DATA> cl : communicationListeners) {
cl.deadlineMissed(key);
}
}
});
newInst.setDeadlineMonitorTask(wdTask);
}
instances.put(key, newInst);
return newInst;
}
// --- experimental code follows. These are paired with the ones in DataReader ----------------
@Override
public Set<Instance<T>> getInstances() {
Collection<Instance<T>> values = instances.values();
return new HashSet<>(values);
}
@Override
public Instance<T> getInstance(KeyHash key) {
return instances.get(key);
}
void clear(Sample<T> aSample) {
LinkedList<Sample<T>> samples = new LinkedList<>();
samples.add(aSample);
clear(samples);
}
void clear(List<Sample<T>> samplesToClear) {
for (Sample<T> s : samplesToClear) {
Instance<T> inst = instances.get(s.getKey());
if (inst != null) {
inst.removeSample(s);
}
}
synchronized (samples) {
samples.removeAll(samplesToClear);
}
}
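// Coherent changes: samples added between coherentChangesBegin() and coherentChangesEnd()
// are tagged with a CoherentSet carrying the starting sequence number; the end of the set
// is marked with a dedicated Sample.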
@Override
public void coherentChangesBegin() {
coherentSet = new CoherentSet(new SequenceNumber(seqNum + 1));
logger.debug("coherentChangesBegin({})", seqNum + 1);
}
@Override
public void coherentChangesEnd() {
if (coherentSet != null) {
logger.debug("coherentChangesEnd({})", coherentSet.getStartSeqNum().getAsLong());
}
coherentSet = null;
addSample(new Sample<T>(++seqNum)); // Add a Sample denoting end of CoherentSet
}
/**
* Gets all the Samples whose sequence number is greater than the given
* sequence number. If there are no such samples, an empty list is
* returned.
*
* @param sequenceNumber sequence number to compare to
* @return a LinkedList of Samples
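*
* <pre>{@code
* // For example, if a remote reader has acknowledged sequence numbers up to 5,
* // this fetches everything it is still missing (variable name is illustrative):
* LinkedList<Sample<T>> pending = getSamplesSince(5);
* }</pre>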
*/
@Override
public LinkedList<Sample<T>> getSamplesSince(long sequenceNumber) {
logger.trace("[{}] getSamplesSince({})", entityId, sequenceNumber);
synchronized (samples) {
for (Sample<T> cc : samples) {
if (cc.getSequenceNumber() > sequenceNumber) {
SortedSet<Sample<T>> tailSet = samples.tailSet(cc);
logger.trace("[{}] returning {}", entityId, tailSet);
return new LinkedList<Sample<T>>(tailSet);
}
}
}
logger.trace("[{}] No chances to return for seq num {}", entityId, sequenceNumber);
return new LinkedList<>();
}
void setCommunicationListeners(List<CommunicationListener<ENTITY_DATA>> communicationListeners) {
this.communicationListeners = communicationListeners;
}
@Override
public void close() {
listeners.clear();
samples.clear();
instances.clear();
communicationListeners.clear();
}
}