org.semanticweb.HermiT.datalog.ConjunctiveQuery Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of org.semanticweb.hermit Show documentation
HermiT is a reasoner for ontologies written using the Web
Ontology Language (OWL). Given an OWL file, HermiT can determine whether or
not the ontology is consistent, identify subsumption relationships between
classes, and much more.
This is the maven build of HermiT and is designed for people who wish to use
HermiT from within the OWL API. It is now versioned in the main HermiT
version repository, although not officially supported by the HermiT
developers.
The version number of this package is a composite of the HermiT version and
a value representing releases of this packaged version. So, 1.3.7.1 is the
first release of the mavenized version of HermiT based on the 1.3.7 release
of HermiT.
This package includes the Jautomata library
(http://jautomata.sourceforge.net/), and builds with it directly. This
library appears to be no longer under active development, and so a "fork"
seems appropriate. No development is intended or anticipated on this code
base.
package org.semanticweb.HermiT.datalog;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.DLClause;
import org.semanticweb.HermiT.model.Term;
import org.semanticweb.HermiT.model.Variable;
import org.semanticweb.HermiT.tableau.DLClauseEvaluator;
import org.semanticweb.HermiT.tableau.DLClauseEvaluator.Worker;
import org.semanticweb.HermiT.tableau.DependencySet;
import org.semanticweb.HermiT.tableau.ExtensionTable;
import org.semanticweb.HermiT.tableau.ExtensionTable.View;
import org.semanticweb.HermiT.tableau.HyperresolutionManager;
import org.semanticweb.HermiT.tableau.Node;
// A conjunctive query compiled against a materialized datalog program.
// An instance is compiled once in the constructor and can then be evaluated
// repeatedly via evaluate(QueryResultCollector).
public class ConjunctiveQuery {
// Engine that owns the materialized extension tables this query runs against.
protected final DatalogEngine m_datalogEngine;
// Body atoms of the query, in the order supplied by the caller.
protected final Atom[] m_queryAtoms;
// Answer terms as supplied by the caller; variable positions are filled in per match.
protected final Term[] m_answerTerms;
// Scratch copy of m_answerTerms handed to the collector; variable positions are
// overwritten for each result (see QueryAnswerCallback.execute).
protected final Term[] m_resultBuffer;
// Seed retrieval that yields a single empty tuple to start the compiled worker program.
protected final OneEmptyTupleRetrieval m_firstRetrieval;
// One-element holder so compiled workers can see the collector installed by evaluate().
protected final QueryResultCollector[] m_queryResultCollector;
// Compiled instruction sequence executed by evaluate().
protected final Worker[] m_workers;
/**
 * Compiles a conjunctive query against the given engine's materialized program.
 *
 * @param datalogEngine engine holding the datalog program; it is materialized here
 * @param queryAtoms    body atoms of the query
 * @param answerTerms   terms (constants and variables) forming one answer tuple
 * @throws IllegalStateException if materialization fails, i.e. the ontology is unsatisfiable
 */
public ConjunctiveQuery(DatalogEngine datalogEngine,Atom[] queryAtoms,Term[] answerTerms) {
    // Materialization must succeed before any query can be compiled.
    if (!datalogEngine.materialize())
        throw new IllegalStateException("The supplied DL ontology is unsatisfiable.");
    m_datalogEngine=datalogEngine;
    m_queryAtoms=queryAtoms;
    m_answerTerms=answerTerms;
    m_resultBuffer=answerTerms.clone();
    m_firstRetrieval=new OneEmptyTupleRetrieval();
    m_queryResultCollector=new QueryResultCollector[1];
    // Reorder the body atoms into an efficient evaluation order, then compile
    // the reordered clause into the worker program run by evaluate().
    DLClause initialClause=DLClause.create(new Atom[0],queryAtoms);
    HyperresolutionManager.BodyAtomsSwapper atomsSwapper=new HyperresolutionManager.BodyAtomsSwapper(initialClause);
    DLClause reorderedClause=atomsSwapper.getSwappedDLClause(0);
    QueryCompiler compiler=new QueryCompiler(this,reorderedClause,answerTerms,datalogEngine.m_termsToNodes,datalogEngine.m_nodesToTerms,m_resultBuffer,m_queryResultCollector,m_firstRetrieval);
    m_workers=compiler.m_workers.toArray(new Worker[compiler.m_workers.size()]);
}
/**
 * Returns the datalog engine this query was compiled against.
 */
public DatalogEngine getDatalogEngine() {
return m_datalogEngine;
}
/**
 * Returns the number of atoms in the body of this query.
 */
public int getNumberOfQueryAtoms() {
    return m_queryAtoms.length;
}
/**
 * Returns the number of atoms in the body of this query.
 *
 * @deprecated misspelled accessor kept only for backward compatibility;
 *             use {@link #getNumberOfQueryAtoms()} instead.
 */
@Deprecated
public int getNumberOfQUeryAtoms() {
    return getNumberOfQueryAtoms();
}
/**
 * Returns the query body atom at the given position.
 *
 * @param atomIndex zero-based index into the query atoms
 */
public Atom getQueryAtom(int atomIndex) {
return m_queryAtoms[atomIndex];
}
/**
 * Returns the number of terms in one answer tuple of this query.
 */
public int getNumberOfAnswerTerms() {
return m_answerTerms.length;
}
/**
 * Returns the answer term at the given position.
 *
 * @param termIndex zero-based index into the answer terms
 */
public Term getAnswerTerm(int termIndex) {
return m_answerTerms[termIndex];
}
/**
 * Evaluates the query, delivering each answer tuple to the supplied collector.
 */
public void evaluate(QueryResultCollector queryResultCollector) {
try {
// Publish the collector so the compiled workers can deliver answers to it.
m_queryResultCollector[0]=queryResultCollector;
m_firstRetrieval.open();
int programCounter=0;
// NOTE(review): the source is truncated from here — the next line fuses this
// method's dispatch-loop header with a field declaration from a later class.
// The remainder of evaluate() (loop body and its finally block), the whole
// OneEmptyTupleRetrieval class, and the header plus first fields of
// QueryAnswerCallback were lost during extraction. Restore them from the
// original HermiT distribution before attempting to compile this file.
while (programCounter m_nodesToTerms;
// Fields of QueryAnswerCallback. NOTE(review): the class header was lost in
// extraction — presumably `protected static class QueryAnswerCallback
// implements DLClauseEvaluator.Worker {`; confirm against the original sources.
// Buffer receiving the terms of the answer tuple currently being produced.
protected final Term[] m_resultBuffer;
// Holder whose single slot contains the collector installed by evaluate().
protected final QueryResultCollector[] m_queryResultCollector;
// Pairs {sourceIndexInValuesBuffer, targetIndexInResultBuffer} describing which
// bound values are copied into which answer positions (see execute()).
protected final int[][] m_copyAnswers;
// Values bound by the preceding workers; the entries read here are Nodes
// (established by the cast in execute()).
protected final Object[] m_valuesBuffer;
/**
 * Creates the worker that materializes one answer tuple and forwards it to
 * the active result collector.
 *
 * @param conjunctiveQuery     the query being evaluated
 * @param nodesToTerms         translation of tableau nodes back to terms
 *                             (generic type restored; it was stripped to a raw
 *                             Map by extraction — the Node-to-Term direction is
 *                             established by the lookup in execute())
 * @param resultBuffer         buffer receiving each answer tuple
 * @param queryResultCollector one-slot holder of the active collector
 * @param copyAnswers          {sourceIndex, targetIndex} copy instructions
 * @param valuesBuffer         values bound by the preceding workers
 */
public QueryAnswerCallback(ConjunctiveQuery conjunctiveQuery,Map<Node,Term> nodesToTerms,Term[] resultBuffer,QueryResultCollector[] queryResultCollector,int[][] copyAnswers,Object[] valuesBuffer) {
    m_conjunctiveQuery=conjunctiveQuery;
    m_nodesToTerms=nodesToTerms;
    m_resultBuffer=resultBuffer;
    m_queryResultCollector=queryResultCollector;
    m_copyAnswers=copyAnswers;
    m_valuesBuffer=valuesBuffer;
}
/**
 * Copies the bound node for each answer position into the result buffer,
 * translates it back to a term, and hands the completed tuple to the
 * active collector. Returns the index of the next worker to run.
 */
public int execute(int programCounter) {
    int copyIndex=m_copyAnswers.length;
    while (--copyIndex>=0) {
        int[] mapping=m_copyAnswers[copyIndex];
        Node boundNode=(Node)m_valuesBuffer[mapping[0]];
        m_resultBuffer[mapping[1]]=m_nodesToTerms.get(boundNode);
    }
    m_queryResultCollector[0].processResult(m_conjunctiveQuery,m_resultBuffer);
    return programCounter+1;
}
/**
 * Describes this worker in compiled-program listings.
 */
@Override
public String toString() {
    return "Call query consumer";
}
}
// Compiles the (reordered) query clause into the worker program run by evaluate().
protected static final class QueryCompiler extends DLClauseEvaluator.ConjunctionCompiler {
// Query on whose behalf the program is compiled.
protected final ConjunctiveQuery m_conjunctiveQuery;
// Answer terms of the query; variables among them become copy instructions.
protected final Term[] m_answerTerms;
// Translation of tableau nodes back to terms, passed on to QueryAnswerCallback.
// NOTE(review): generic type arguments were stripped by extraction; presumably
// Map<Node,Term> — confirm against the original sources.
protected final Map m_nodesToTerms;
// Buffer receiving each answer tuple.
protected final Term[] m_resultBuffer;
// One-slot holder of the active collector, shared with the compiled workers.
protected final QueryResultCollector[] m_queryResultCollector;
/**
 * Compiles the query clause into a sequence of workers.
 *
 * @param conjunctiveQuery        the query being compiled
 * @param queryDLClause           headless clause whose body holds the (reordered) query atoms
 * @param answerTerms             terms forming one answer tuple
 * @param termsToNodes            translation of terms to tableau nodes (generic
 *                                types on the two maps restored; extraction had
 *                                stripped them to raw Map)
 * @param nodesToTerms            translation of tableau nodes back to terms
 * @param resultBuffer            buffer receiving each answer tuple
 * @param queryResultCollector    one-slot holder of the active collector
 * @param oneEmptyTupleRetrieval  seed retrieval producing a single empty tuple
 */
public QueryCompiler(ConjunctiveQuery conjunctiveQuery,DLClause queryDLClause,Term[] answerTerms,Map<Term,Node> termsToNodes,Map<Node,Term> nodesToTerms,Term[] resultBuffer,QueryResultCollector[] queryResultCollector,ExtensionTable.Retrieval oneEmptyTupleRetrieval) {
    super(new DLClauseEvaluator.BufferSupply(),new DLClauseEvaluator.ValuesBufferManager(Collections.singleton(queryDLClause),termsToNodes),null,conjunctiveQuery.m_datalogEngine.m_extensionManager,queryDLClause.getBodyAtoms(),getAnswerVariables(answerTerms));
    m_conjunctiveQuery=conjunctiveQuery;
    m_answerTerms=answerTerms;
    m_nodesToTerms=nodesToTerms;
    m_resultBuffer=resultBuffer;
    m_queryResultCollector=queryResultCollector;
    // Emit the worker program, seeded by the retrieval that yields one empty tuple.
    generateCode(0,oneEmptyTupleRetrieval);
}
// NOTE(review): extraction damage below — compileHeads() lost most of its body,
// and its remains are fused with the signature of getAnswerVariables(...) on a
// single line (the `for` header runs straight into `getAnswerVariables`). The
// generic type arguments on the Lists were also stripped. Restore both methods
// from the original HermiT distribution before attempting to compile.
protected void compileHeads() {
// Presumably collects {valuesBufferIndex, answerPosition} copy instructions
// and finishes by appending a QueryAnswerCallback worker — confirm upstream.
List copyAnswers=new ArrayList();
for (int index=0;index getAnswerVariables(Term[] answerTerms) {
// Collects, in order, those answer terms that are variables; these become the
// variables the conjunction compiler must bind.
List result=new ArrayList();
for (Term answerTerm : answerTerms)
if (answerTerm instanceof Variable)
result.add((Variable)answerTerm);
return result;
}
}
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy