org.semanticweb.HermiT.blocking.AnywhereValidatedBlocking Maven / Gradle / Ivy
HermiT is a reasoner for ontologies written using the Web Ontology Language (OWL). Given an OWL file, HermiT can determine whether or not the ontology is consistent, identify subsumption relationships between classes, and much more.
This is the Maven build of HermiT, intended for people who wish to use HermiT from within the OWL API. It is now versioned in the main HermiT version repository, although it is not officially supported by the HermiT developers.
The version number of this package is a composite of the HermiT version and a value representing the OWLAPI release it is compatible with. Note that the group id for the upstream HermiT is com.hermit-reasoner, while this fork is released under net.sourceforge.owlapi.
This fork exists to allow HermiT users to use newer OWLAPI versions than the ones supported by the original HermiT codebase.
This package includes the Jautomata library (http://jautomata.sourceforge.net/), and builds with it directly. This library appears to be no longer under active development, and so a "fork" seems appropriate. No development is intended or anticipated on this code base.
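For orientation, here is a minimal sketch of how this artifact is typically used from the OWL API. The file name "ontology.owl" is a placeholder for a local OWL file; ReasonerFactory and OWLReasoner are the standard entry points.

import java.io.File;
import org.semanticweb.HermiT.ReasonerFactory;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.OWLReasoner;

public class HermiTExample {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager manager=OWLManager.createOWLOntologyManager();
        // "ontology.owl" is a placeholder for a local OWL file.
        OWLOntology ontology=manager.loadOntologyFromOntologyDocument(new File("ontology.owl"));
        OWLReasoner reasoner=new ReasonerFactory().createReasoner(ontology);
        System.out.println("Consistent: "+reasoner.isConsistent());
        // Classify the ontology, i.e. compute all subsumptions between named classes.
        reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
        reasoner.dispose();
    }
}

Alternatively, the reasoner can be instantiated directly via new org.semanticweb.HermiT.Reasoner(ontology).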
/* Copyright 2008, 2009, 2010 by the Oxford University Computing Laboratory
This file is part of HermiT.
HermiT is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HermiT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with HermiT. If not, see <http://www.gnu.org/licenses/>.
*/
package org.semanticweb.HermiT.blocking;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.semanticweb.HermiT.blocking.ValidatedSingleDirectBlockingChecker.ValidatedBlockingObject;
import org.semanticweb.HermiT.model.AtomicRole;
import org.semanticweb.HermiT.model.Concept;
import org.semanticweb.HermiT.model.DLClause;
import org.semanticweb.HermiT.model.DLOntology;
import org.semanticweb.HermiT.model.DataRange;
import org.semanticweb.HermiT.model.Variable;
import org.semanticweb.HermiT.monitor.TableauMonitor;
import org.semanticweb.HermiT.tableau.DLClauseEvaluator;
import org.semanticweb.HermiT.tableau.Node;
import org.semanticweb.HermiT.tableau.NodeType;
import org.semanticweb.HermiT.tableau.Tableau;
/**Anywhere validated blocking strategy.*/
public class AnywhereValidatedBlocking implements BlockingStrategy {
protected final DirectBlockingChecker m_directBlockingChecker;
protected final ValidatedBlockersCache m_currentBlockersCache;
protected BlockingValidator m_permanentBlockingValidator;
protected BlockingValidator m_additionalBlockingValidator;
protected Tableau m_tableau;
protected Node m_firstChangedNode;
protected Node m_lastValidatedUnchangedNode;
protected final boolean m_useSimpleCore;
/**
* @param directBlockingChecker directBlockingChecker
* @param useSimpleCore useSimpleCore
*/
public AnywhereValidatedBlocking(DirectBlockingChecker directBlockingChecker,boolean useSimpleCore) {
m_directBlockingChecker=directBlockingChecker;
m_currentBlockersCache=new ValidatedBlockersCache(m_directBlockingChecker);
m_useSimpleCore=useSimpleCore;
}
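// The DirectBlockingChecker decides when one node's label data makes it a candidate
// blocker for another (via blockingHashCode() and isBlockedBy()); the blockers cache
// indexes all current candidate blockers by that hash code.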
@Override
public void initialize(Tableau tableau) {
m_tableau=tableau;
m_directBlockingChecker.initialize(tableau);
m_permanentBlockingValidator=new BlockingValidator(m_tableau,m_tableau.getPermanentDLOntology().getDLClauses());
updateAdditionalBlockingValidator();
}
@Override
public void additionalDLOntologySet(DLOntology additionalDLOntology) {
updateAdditionalBlockingValidator();
}
@Override
public void additionalDLOntologyCleared() {
updateAdditionalBlockingValidator();
}
protected void updateAdditionalBlockingValidator() {
if (m_tableau.getAdditionalHyperresolutionManager()==null)
m_additionalBlockingValidator=null;
else
m_additionalBlockingValidator=new BlockingValidator(m_tableau,m_tableau.getAdditionalDLOntology().getDLClauses());
}
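// Besides the permanent DL-ontology, the tableau may temporarily carry an additional
// DL-ontology (e.g. during entailment checking); candidate blocks must then be
// validated against both clause sets, so a second validator is kept in sync here.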
@Override
public void clear() {
m_currentBlockersCache.clear();
m_firstChangedNode=null;
m_directBlockingChecker.clear();
m_lastValidatedUnchangedNode=null;
m_permanentBlockingValidator.clear();
if (m_additionalBlockingValidator!=null)
m_additionalBlockingValidator.clear();
}
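// Blocking is computed in two phases: during normal expansion only the cheap,
// cache-based pre-blocking test is applied; when the tableau is otherwise fully
// expanded (finalChance==true), each candidate block is additionally validated
// against the DL clauses before it is allowed to stand.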
@Override
public void computeBlocking(boolean finalChance) {
if (finalChance) {
validateBlocks();
}
else {
computePreBlocking();
}
}
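// Pre-blocking rescans all nodes from the first changed node onwards: their cache
// entries are dropped, each node is (re-)blocked by its parent's blocker, a cached
// candidate, or not at all, and unblocked nodes re-enter the cache as potential blockers.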
/**
* Compute preblocking.
*/
public void computePreBlocking() {
if (m_firstChangedNode!=null) {
Node node=m_firstChangedNode;
while (node!=null) {
m_currentBlockersCache.removeNode(node);
node=node.getNextTableauNode();
}
node=m_firstChangedNode;
while (node!=null) {
if (node.isActive() && (m_directBlockingChecker.canBeBlocked(node) || m_directBlockingChecker.canBeBlocker(node))) {
if (m_directBlockingChecker.hasBlockingInfoChanged(node) || !node.isDirectlyBlocked() || node.getBlocker().getNodeID()>=m_firstChangedNode.getNodeID()) {
Node parent=node.getParent();
if (parent==null)
node.setBlocked(null,false);
else if (parent.isBlocked())
node.setBlocked(parent,false);
else {
Node blocker=null;
if (m_lastValidatedUnchangedNode==null)
blocker=m_currentBlockersCache.getBlocker(node);
else {
// after a validation has been done, only re-block if something has been modified
Node previousBlocker=node.getBlocker();
boolean nodeModified=m_directBlockingChecker.hasChangedSinceValidation(node);
for (Node possibleBlocker : m_currentBlockersCache.getPossibleBlockers(node)) {
if (nodeModified || m_directBlockingChecker.hasChangedSinceValidation(possibleBlocker) || previousBlocker==possibleBlocker) {
blocker=possibleBlocker;
break;
}
}
}
node.setBlocked(blocker,blocker!=null);
}
}
if (!node.isBlocked() && m_directBlockingChecker.canBeBlocker(node))
m_currentBlockersCache.addNode(node);
}
m_directBlockingChecker.clearBlockingInfoChanged(node);
node=node.getNextTableauNode();
}
m_firstChangedNode=null;
}
}
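// Validation rescans all nodes from the last validated unchanged node onwards and
// checks every established block against the DL clauses: the current blocker is tried
// first, then the remaining cached candidates; nodes whose block cannot be repaired
// are recorded so that computePreBlocking() revisits them.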
/**
* Validate blocks.
*/
public void validateBlocks() {
// statistics for debugging:
boolean debuggingMode=false;
int checkedBlocks=0;
int invalidBlocks=0;
TableauMonitor monitor=m_tableau.getTableauMonitor();
if (monitor!=null)
monitor.blockingValidationStarted();
Node node;
node=m_lastValidatedUnchangedNode==null ? m_tableau.getFirstTableauNode() : m_lastValidatedUnchangedNode;
Node firstValidatedNode=node;
while (node!=null) {
m_currentBlockersCache.removeNode(node);
node=node.getNextTableauNode();
}
node=firstValidatedNode;
if (debuggingMode)
System.out.print("Model size: "+(m_tableau.getNumberOfNodesInTableau()-m_tableau.getNumberOfMergedOrPrunedNodes())+" Current ID:");
Node firstInvalidlyBlockedNode=null;
while (node!=null) {
if (node.isActive()) {
if (node.isBlocked()) { // && node.hasUnprocessedExistentials()
checkedBlocks++;
// check whether the block is a correct one
if ((node.isDirectlyBlocked() && (m_directBlockingChecker.hasChangedSinceValidation(node) || m_directBlockingChecker.hasChangedSinceValidation(node.getParent()) || m_directBlockingChecker.hasChangedSinceValidation(node.getBlocker()))) || !node.getParent().isBlocked()) {
Node validBlocker=null;
Node currentBlocker=node.getBlocker();
if (node.isDirectlyBlocked() && currentBlocker!=null) {
// try the old blocker first
if (isBlockValid(node))
validBlocker=currentBlocker;
}
if (validBlocker==null) {
for (Node possibleBlocker : m_currentBlockersCache.getPossibleBlockers(node)) {
if (possibleBlocker!=currentBlocker) {
node.setBlocked(possibleBlocker,true);
m_permanentBlockingValidator.blockerChanged(node); // invalidate cache
if (m_additionalBlockingValidator!=null)
m_additionalBlockingValidator.blockerChanged(node);
if (isBlockValid(node)) {
validBlocker=possibleBlocker;
break;
}
}
}
}
if (validBlocker==null && node.hasUnprocessedExistentials()) {
invalidBlocks++;
if (firstInvalidlyBlockedNode==null)
firstInvalidlyBlockedNode=node;
}
node.setBlocked(validBlocker,validBlocker!=null);
}
}
m_lastValidatedUnchangedNode=node;
if (!node.isBlocked() && m_directBlockingChecker.canBeBlocker(node))
m_currentBlockersCache.addNode(node);
}
node=node.getNextTableauNode();
}
node=firstValidatedNode;
while (node!=null) {
if (node.isActive()) {
m_directBlockingChecker.setHasChangedSinceValidation(node,false);
ValidatedBlockingObject blockingObject=(ValidatedBlockingObject)node.getBlockingObject();
blockingObject.setBlockViolatesParentConstraints(false);
blockingObject.setHasAlreadyBeenChecked(false);
}
node=node.getNextTableauNode();
}
// if set to some node, then computePreBlocking will be asked to check from that node onwards in case of invalid blocks
m_firstChangedNode=firstInvalidlyBlockedNode;
if (monitor!=null)
monitor.blockingValidationFinished(invalidBlocks);
if (debuggingMode) {
System.out.println("");
System.out.println("Checked "+checkedBlocks+" blocked nodes of which "+invalidBlocks+" were invalid.");
}
}
protected boolean isBlockValid(Node node) {
if (m_permanentBlockingValidator.isBlockValid(node)) {
if (m_additionalBlockingValidator!=null)
return m_additionalBlockingValidator.isBlockValid(node);
else
return true;
}
else
return false;
}
@Override
public boolean isPermanentAssertion(Concept concept,Node node) {
return true;
}
@Override
public boolean isPermanentAssertion(DataRange range,Node node) {
return true;
}
protected void validationInfoChanged(Node node) {
    if (node!=null) {
        if (m_lastValidatedUnchangedNode!=null && node.getNodeID()<m_lastValidatedUnchangedNode.getNodeID())
            m_lastValidatedUnchangedNode=node;
        m_directBlockingChecker.setHasChangedSinceValidation(node,true);
    }
}
@Override
public void nodeDestroyed(Node node) {
    m_currentBlockersCache.removeNode(node);
    m_directBlockingChecker.nodeDestroyed(node);
    if (m_firstChangedNode!=null && m_firstChangedNode.getNodeID()>=node.getNodeID())
        m_firstChangedNode=null;
    if (m_lastValidatedUnchangedNode!=null && node.getNodeID()<m_lastValidatedUnchangedNode.getNodeID())
        m_lastValidatedUnchangedNode=node;
}
@Override
public void dlClauseBodyCompiled(List<DLClauseEvaluator.Worker> workers,DLClause dlClause,List<Variable> variables,Object[] valuesBuffer,boolean[] coreVariables) {
    if (m_useSimpleCore) {
        for (int i=0;i<coreVariables.length;i++)
            coreVariables[i]=false;
    }
    else if (dlClause.getHeadLength()>1) {
        // clauses with more than one head atom keep all their variables in the core
        for (int i=0;i<coreVariables.length;i++)
            coreVariables[i]=true;
        workers.add(new ComputeCoreVariables(valuesBuffer,coreVariables));
    }
}
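// DLClauseEvaluator worker appended to compiled clause bodies: once the body has been
// matched, it inspects the nodes bound to the clause variables and clears the "core"
// flag for the variable mapped to the root of the matched tree-shaped region.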
protected static final class ComputeCoreVariables implements DLClauseEvaluator.Worker,Serializable {
private static final long serialVersionUID=899293772370136783L;
protected final Object[] m_valuesBuffer;
protected final boolean[] m_coreVariables;
public ComputeCoreVariables(Object[] valuesBuffer,boolean[] coreVariables) {
m_valuesBuffer=valuesBuffer;
m_coreVariables=coreVariables;
}
public void clear() {
}
@Override
public int execute(int programCounter) {
Node potentialNonCore=null;
int potentialNonCoreIndex=-1;
// find the root of the subtree induced by the mapped nodes; that node cannot be core
for (int variableIndex=m_coreVariables.length-1;variableIndex>=0;--variableIndex) {
    Node node=(Node)m_valuesBuffer[variableIndex];
    if (node.getNodeType()==NodeType.TREE_NODE && (potentialNonCore==null || node.getTreeDepth()<potentialNonCore.getTreeDepth())) {
        potentialNonCore=node;
        potentialNonCoreIndex=variableIndex;
    }
}
if (potentialNonCore!=null) {
    boolean isNonCore=true;
    for (int variableIndex=m_coreVariables.length-1;isNonCore && variableIndex>=0;--variableIndex) {
        Node node=(Node)m_valuesBuffer[variableIndex];
        if (!node.isRootNode() && potentialNonCore!=node && potentialNonCore.getTreeDepth()<node.getTreeDepth())
            isNonCore=false;
    }
    if (isNonCore)
        m_coreVariables[potentialNonCoreIndex]=false;
}
return programCounter+1;
}
}
}
/**A cache of potential blockers, indexed by the direct blocking checker's hash code.*/
class ValidatedBlockersCache {
    protected final DirectBlockingChecker m_directBlockingChecker;
    protected CacheEntry[] m_buckets;
    protected int m_numberOfElements;
    protected int m_threshold;
    protected CacheEntry m_emptyEntries;
    public ValidatedBlockersCache(DirectBlockingChecker directBlockingChecker) {
        m_directBlockingChecker=directBlockingChecker;
        clear();
    }
    public void clear() {
        m_buckets=new CacheEntry[1024];
        m_threshold=(int)(m_buckets.length*0.75);
        m_numberOfElements=0;
        m_emptyEntries=null;
    }
    public boolean removeNode(Node node) {
        // addNode() explains why the cache entry is stored with the node as its blocking cargo.
        CacheEntry removeEntry=(CacheEntry)node.getBlockingCargo();
        if (removeEntry!=null) {
            int bucketIndex=getIndexFor(removeEntry.m_hashCode,m_buckets.length);
            CacheEntry lastEntry=null;
            CacheEntry entry=m_buckets[bucketIndex];
            while (entry!=null) {
                if (entry==removeEntry) {
                    if (node==entry.m_nodes.get(0)) {
                        // the node is the entry's canonical blocker: remove and recycle the whole entry
                        for (Node n : entry.m_nodes)
                            n.setBlockingCargo(null);
                        if (lastEntry==null)
                            m_buckets[bucketIndex]=entry.m_nextEntry;
                        else
                            lastEntry.m_nextEntry=entry.m_nextEntry;
                        entry.m_nextEntry=m_emptyEntries;
                        entry.m_nodes=new ArrayList<>();
entry.m_hashCode=0;
m_emptyEntries=entry;
m_numberOfElements--;
}
else {
if (entry.m_nodes.contains(node)) {
for (int i=entry.m_nodes.size()-1;i>=entry.m_nodes.indexOf(node);i--) {
entry.m_nodes.get(i).setBlockingCargo(null);
}
entry.m_nodes.subList(entry.m_nodes.indexOf(node),entry.m_nodes.size()).clear();
}
else {
throw new IllegalStateException("Internal error: entry not in cache!");
}
}
return true;
}
lastEntry=entry;
entry=entry.m_nextEntry;
}
throw new IllegalStateException("Internal error: entry not in cache!");
}
return false;
}
public void addNode(Node node) {
int hashCode=m_directBlockingChecker.blockingHashCode(node);
int bucketIndex=getIndexFor(hashCode,m_buckets.length);
CacheEntry entry=m_buckets[bucketIndex];
while (entry!=null) {
if (hashCode==entry.m_hashCode && m_directBlockingChecker.isBlockedBy(entry.m_nodes.get(0),node)) {
if (!entry.m_nodes.contains(node)) {
entry.add(node);
node.setBlockingCargo(entry);
return;
}
else {
throw new IllegalStateException("Internal error: node already in the cache!");
}
}
entry=entry.m_nextEntry;
}
if (m_emptyEntries==null)
entry=new CacheEntry();
else {
entry=m_emptyEntries;
m_emptyEntries=m_emptyEntries.m_nextEntry;
}
entry.initialize(node,hashCode,m_buckets[bucketIndex]);
m_buckets[bucketIndex]=entry;
// When a node is added to the cache, we record with the node the entry.
// This is used to remove nodes from the cache. Note that changes to a node
// can affect its label. Therefore, we CANNOT remove a node by taking its present
// blocking hash-code, as this can be different from the hash-code used at the
// time the node has been added to the cache.
node.setBlockingCargo(entry);
m_numberOfElements++;
if (m_numberOfElements>=m_threshold)
resize(m_buckets.length*2);
}
protected void resize(int newCapacity) {
CacheEntry[] newBuckets=new CacheEntry[newCapacity];
for (int i=0;i<m_buckets.length;i++) {
    CacheEntry entry=m_buckets[i];
    while (entry!=null) {
        CacheEntry nextEntry=entry.m_nextEntry;
        int newIndex=getIndexFor(entry.m_hashCode,newCapacity);
        entry.m_nextEntry=newBuckets[newIndex];
        newBuckets[newIndex]=entry;
        entry=nextEntry;
    }
}
m_buckets=newBuckets;
m_threshold=(int)(newCapacity*0.75);
}
public Node getBlocker(Node node) {
    if (m_directBlockingChecker.canBeBlocked(node)) {
        int hashCode=m_directBlockingChecker.blockingHashCode(node);
        int bucketIndex=getIndexFor(hashCode,m_buckets.length);
        CacheEntry entry=m_buckets[bucketIndex];
        while (entry!=null) {
            if (hashCode==entry.m_hashCode && m_directBlockingChecker.isBlockedBy(entry.m_nodes.get(0),node))
                return entry.m_nodes.get(0);
            entry=entry.m_nextEntry;
        }
    }
    return null;
}
public List<Node> getPossibleBlockers(Node node) {
if (m_directBlockingChecker.canBeBlocked(node)) {
int hashCode=m_directBlockingChecker.blockingHashCode(node);
int bucketIndex=getIndexFor(hashCode,m_buckets.length);
CacheEntry entry=m_buckets[bucketIndex];
while (entry!=null) {
if (hashCode==entry.m_hashCode && m_directBlockingChecker.isBlockedBy(entry.m_nodes.get(0),node)) {
assert !entry.m_nodes.contains(node); // we should never try to block a node that is itself in the cache
return entry.m_nodes;
}
entry=entry.m_nextEntry;
}
}
return new ArrayList<>();
}
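// Supplemental hash as in older java.util.HashMap implementations: the blocking hash
// code is bit-mixed so that all of its bits influence the bucket index, which is then
// taken modulo the power-of-two table length via the final mask.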
protected static int getIndexFor(int _hashCode,int tableLength) {
int hashCode=_hashCode;
hashCode+=~(hashCode<<9);
hashCode^=(hashCode>>>14);
hashCode+=(hashCode<<4);
hashCode^=(hashCode>>>10);
return hashCode&(tableLength-1);
}
@Override
public String toString() {
StringBuilder buckets=new StringBuilder();
for (int i=0;i<m_buckets.length;i++) {
    CacheEntry entry=m_buckets[i];
    while (entry!=null) {
        buckets.append(entry.toString()).append('\n');
        entry=entry.m_nextEntry;
    }
}
return buckets.toString();
}
protected static class CacheEntry implements Serializable {
    protected List<Node> m_nodes;
protected int m_hashCode;
protected CacheEntry m_nextEntry;
public void initialize(Node node,int hashCode,CacheEntry nextEntry) {
m_nodes=new ArrayList<>();
add(node);
m_hashCode=hashCode;
m_nextEntry=nextEntry;
}
public boolean add(Node node) {
for (Node n : m_nodes) {
assert n.getNodeID()<=node.getNodeID();
}
return m_nodes.add(node);
}
@Override
public String toString() {
return m_nodes.stream().map(Object::toString).collect(Collectors.joining(" ", "HashCode: "+m_hashCode+" Nodes: ", ""));
}
}
}