org.semanticweb.HermiT.datalog.ConjunctiveQuery Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of org.semanticweb.hermit Show documentation
Show all versions of org.semanticweb.hermit Show documentation
HermiT is a reasoner for ontologies written using the Web Ontology Language (OWL). Given an OWL file, HermiT can determine whether or not the ontology is consistent, identify subsumption relationships between classes, and much more.
This is the maven build of HermiT and is designed for people who wish to use HermiT from within the OWL API. It is now versioned in the main HermiT version repository, although not officially supported by the HermiT developers.
The version number of this package is a composite of the HermiT version and a value representing the OWLAPI release it is compatible with. Note that the group id for the upstream HermiT is com.hermit-reasoner, while this fork is released under net.sourceforge.owlapi.
This fork exists to allow HermiT users to use newer OWLAPI versions than the ones supported by the original HermiT codebase.
This package includes the Jautomata library (http://jautomata.sourceforge.net/), and builds with it directly. This library appears to be no longer under active development, and so a "fork" seems appropriate. No development is intended or anticipated on this code base.
The newest version!
package org.semanticweb.HermiT.datalog;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.DLClause;
import org.semanticweb.HermiT.model.Term;
import org.semanticweb.HermiT.model.Variable;
import org.semanticweb.HermiT.tableau.DLClauseEvaluator;
import org.semanticweb.HermiT.tableau.DLClauseEvaluator.Worker;
import org.semanticweb.HermiT.tableau.DependencySet;
import org.semanticweb.HermiT.tableau.ExtensionTable;
import org.semanticweb.HermiT.tableau.ExtensionTable.View;
import org.semanticweb.HermiT.tableau.HyperresolutionManager;
import org.semanticweb.HermiT.tableau.Node;
/**
 * Conjunctive query. Compiles a set of query atoms against a materialized
 * datalog program and streams answer tuples to a {@link QueryResultCollector}.
 */
public class ConjunctiveQuery {
// Engine holding the materialized datalog program this query runs against.
protected final DatalogEngine m_datalogEngine;
// Atoms forming the body of the query.
protected final Atom[] m_queryAtoms;
// Terms whose bindings make up one answer tuple.
protected final Term[] m_answerTerms;
// Reusable buffer into which each answer tuple is written before being passed to the collector.
protected final Term[] m_resultBuffer;
// Seed retrieval that yields a single empty tuple to start plan execution.
protected final OneEmptyTupleRetrieval m_firstRetrieval;
// One-element holder for the collector of the evaluation currently in progress.
protected final QueryResultCollector[] m_queryResultCollector;
// Compiled evaluation plan: workers executed in sequence by evaluate().
protected final Worker[] m_workers;
/**
 * Compiles a conjunctive query over the given (materialized) datalog engine.
 *
 * @param datalogEngine datalog engine whose materialization is queried
 * @param queryAtoms atoms forming the query body
 * @param answerTerms terms whose bindings constitute one answer tuple
 */
public ConjunctiveQuery(DatalogEngine datalogEngine,Atom[] queryAtoms,Term[] answerTerms) {
    // The program must be materialized before a query plan can be compiled.
    if (!datalogEngine.materialize())
        throw new IllegalStateException("The supplied DL ontology is unsatisfiable.");
    m_datalogEngine=datalogEngine;
    m_queryAtoms=queryAtoms;
    m_answerTerms=answerTerms;
    m_resultBuffer=answerTerms.clone();
    m_firstRetrieval=new OneEmptyTupleRetrieval();
    m_queryResultCollector=new QueryResultCollector[1];
    // Reorder the body atoms into a (hopefully) efficient evaluation order.
    HyperresolutionManager.BodyAtomsSwapper atomReorderer=
        new HyperresolutionManager.BodyAtomsSwapper(DLClause.create(new Atom[0],queryAtoms));
    DLClause reorderedQueryClause=atomReorderer.getSwappedDLClause(0);
    // Compile the reordered clause into the sequence of workers run by evaluate().
    QueryCompiler planCompiler=new QueryCompiler(this,reorderedQueryClause,answerTerms,
        datalogEngine.m_termsToNodes,datalogEngine.m_nodesToTerms,
        m_resultBuffer,m_queryResultCollector,m_firstRetrieval);
    m_workers=planCompiler.m_workers.toArray(new Worker[planCompiler.m_workers.size()]);
}
/**
 * Returns the datalog engine this query was compiled against.
 *
 * @return datalog engine
 */
public DatalogEngine getDatalogEngine() {
    return this.m_datalogEngine;
}
/**
 * @return number of query atoms
 */
public int getNumberOfQueryAtoms() {
    return m_queryAtoms.length;
}
/**
 * @return number of query atoms
 * @deprecated the method name is misspelled; use {@link #getNumberOfQueryAtoms()} instead.
 */
@Deprecated
public int getNumberOfQUeryAtoms() {
    // Kept as a delegating alias so existing callers continue to compile.
    return getNumberOfQueryAtoms();
}
/**
 * @return number of answer terms
 */
public int getNumberOfAnswerTerms() {
return m_answerTerms.length;
}
/**
 * Evaluates the query, feeding each answer tuple to the given collector.
 *
 * @param queryResultCollector queryResultCollector
 */
public void evaluate(QueryResultCollector queryResultCollector) {
try {
// Install the collector for the duration of this evaluation.
m_queryResultCollector[0]=queryResultCollector;
m_firstRetrieval.open();
int programCounter=0;
// NOTE(review): the remainder of this method and the following declarations were
// garbled by HTML extraction -- everything between the '<' in the loop condition
// below and the '>' of a 'Map<Node,Term>' type further down was stripped (this
// appears to have removed the worker loop body, the 'finally' block, the
// OneEmptyTupleRetrieval inner class, and the QueryAnswerCallback class header).
// Restore this region from the upstream HermiT sources before compiling.
while (programCounter m_nodesToTerms;
// Buffer receiving the terms of the current answer tuple.
protected final Term[] m_resultBuffer;
// Shared one-element holder for the currently active result collector.
protected final QueryResultCollector[] m_queryResultCollector;
// Index pairs {source index into m_valuesBuffer, target index into m_resultBuffer}.
protected final int[][] m_copyAnswers;
// Variable bindings produced by the preceding workers; presumably holds Node keys
// for m_nodesToTerms -- TODO confirm against DLClauseEvaluator.
protected final Object[] m_valuesBuffer;
/**
 * Creates the terminal worker of a compiled query plan: it maps the node
 * bindings accumulated in the values buffer back to terms and hands the
 * finished answer tuple to the registered collector.
 *
 * @param conjunctiveQuery query
 * @param nodesToTerms nodes to terms
 * @param resultBuffer results
 * @param queryResultCollector collector
 * @param copyAnswers answers
 * @param valuesBuffer value buffer
 */
public QueryAnswerCallback(ConjunctiveQuery conjunctiveQuery,Map<Node,Term> nodesToTerms,Term[] resultBuffer,QueryResultCollector[] queryResultCollector,int[][] copyAnswers,Object[] valuesBuffer) {
    // Generic type of nodesToTerms restored to Map<Node,Term>; the original type
    // parameters were stripped by the HTML extraction that produced this file.
    m_conjunctiveQuery=conjunctiveQuery;
    m_nodesToTerms=nodesToTerms;
    m_resultBuffer=resultBuffer;
    m_queryResultCollector=queryResultCollector;
    m_copyAnswers=copyAnswers;
    m_valuesBuffer=valuesBuffer;
}
@Override
public int execute(int programCounter) {
    // Translate each bound node into its term and place it at the answer position
    // recorded for it; each pair is {index into values buffer, index into result buffer}.
    for (int i=0;i<m_copyAnswers.length;i++) {
        int[] mapping=m_copyAnswers[i];
        m_resultBuffer[mapping[1]]=m_nodesToTerms.get(m_valuesBuffer[mapping[0]]);
    }
    // Deliver the completed tuple to the active collector.
    m_queryResultCollector[0].processResult(m_conjunctiveQuery,m_resultBuffer);
    return programCounter+1;
}
@Override
public String toString() {
// Label shown when a compiled query plan is printed.
return "Call query consumer";
}
}
/**
 * Compiles the body of a (swapped) query clause into a worker sequence,
 * appending a {@link QueryAnswerCallback} as the terminal worker.
 */
protected static final class QueryCompiler extends DLClauseEvaluator.ConjunctionCompiler {
    // Query being compiled; passed through to the answer callback.
    protected final ConjunctiveQuery m_conjunctiveQuery;
    // Answer terms in their original order.
    protected final Term[] m_answerTerms;
    // Maps tableau nodes back to terms. Generic type restored to Map<Node,Term>;
    // the type parameters were stripped by the HTML extraction of this file.
    protected final Map<Node,Term> m_nodesToTerms;
    // Buffer the compiled plan writes answers into.
    protected final Term[] m_resultBuffer;
    // One-element holder for the active result collector.
    protected final QueryResultCollector[] m_queryResultCollector;
/**
 * Compiles the body atoms of {@code queryDLClause} into workers and then emits
 * the answer-producing tail via {@code compileHeads()}.
 *
 * @param conjunctiveQuery query being compiled
 * @param queryDLClause swapped query clause whose body is compiled
 * @param answerTerms answer terms
 * @param termsToNodes terms to nodes (generic types restored; stripped by extraction)
 * @param nodesToTerms nodes to terms (generic types restored; stripped by extraction)
 * @param resultBuffer buffer answers are written into
 * @param queryResultCollector collector holder
 * @param oneEmptyTupleRetrieval seed retrieval producing one empty tuple
 */
public QueryCompiler(ConjunctiveQuery conjunctiveQuery,DLClause queryDLClause,Term[] answerTerms,Map<Term,Node> termsToNodes,Map<Node,Term> nodesToTerms,Term[] resultBuffer,QueryResultCollector[] queryResultCollector,ExtensionTable.Retrieval oneEmptyTupleRetrieval) {
    super(new DLClauseEvaluator.BufferSupply(),new DLClauseEvaluator.ValuesBufferManager(Collections.singleton(queryDLClause),termsToNodes),null,conjunctiveQuery.m_datalogEngine.m_extensionManager,queryDLClause.getBodyAtoms(),getAnswerVariables(answerTerms));
    m_conjunctiveQuery=conjunctiveQuery;
    m_answerTerms=answerTerms;
    m_nodesToTerms=nodesToTerms;
    m_resultBuffer=resultBuffer;
    m_queryResultCollector=queryResultCollector;
    // Emit the worker sequence, seeded by the one-empty-tuple retrieval.
    generateCode(0,oneEmptyTupleRetrieval);
}
@Override
protected void compileHeads() {
List copyAnswers=new ArrayList<>();
// NOTE(review): the remainder of this method and the signature of getAnswerVariables()
// were garbled by HTML extraction -- the text between the '<' in the loop condition
// below and the '>' of 'List<Variable>' in the getAnswerVariables declaration was
// stripped. Restore this region from the upstream HermiT sources before compiling.
for (int index=0;index getAnswerVariables(Term[] answerTerms) {
// Collects, in order, the answer terms that are variables; constants are skipped.
List result=new ArrayList<>();
for (Term answerTerm : answerTerms)
if (answerTerm instanceof Variable)
result.add((Variable)answerTerm);
return result;
}
}
}