// it.unife.endif.ml.bundle.Bundle — stray artifact-repository header (Maven / Gradle / Ivy listing)
/**
* This file is part of BUNDLE.
*
* BUNDLE is a probabilistic reasoner for OWL 2 ontologies.
*
* BUNDLE can be used both as module and as standalone.
*
* LEAP was implemented as a plugin of DL-Learner http://dl-learner.org,
* but some components can be used as stand-alone.
*
* BUNDLE and all its parts are distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package it.unife.endif.ml.bundle;
import it.unife.endif.ml.bundle.utilities.QueryResult;
import com.clarkparsia.owlapi.explanation.BlackBoxExplanation;
import com.clarkparsia.owlapi.explanation.GlassBoxExplanation;
import com.clarkparsia.owlapi.explanation.MultipleExplanationGenerator;
import com.clarkparsia.owlapi.explanation.SatisfiabilityConverter;
import com.clarkparsia.owlapi.explanation.TransactionAwareSingleExpGen;
import com.clarkparsia.owlapiv3.OWL;
import com.clarkparsia.owlapiv3.OntologyUtils;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;
import java.io.PrintStream;
//import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import com.clarkparsia.owlapiv3.OWL;
import net.sf.javabdd.BDD;
import net.sf.javabdd.BDDFactory;
import net.sf.javabdd.JFactory;
import org.apache.log4j.Logger;
import org.semanticweb.owlapi.util.mansyntax.ManchesterOWLSyntaxParser;
import org.mindswap.pellet.PelletOptions;
import org.mindswap.pellet.utils.Timer;
import org.mindswap.pellet.utils.Timers;
import org.mindswap.pellet.utils.progress.ConsoleProgressMonitor;
import org.mindswap.pellet.utils.progress.ProgressMonitor;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.manchestersyntax.renderer.ParserException;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataProperty;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLException;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLProperty;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.reasoner.Node;
import org.semanticweb.owlapi.reasoner.NodeSet;
import pellet.PelletCmdApp;
import pellet.PelletCmdException;
import pellet.PelletCmdOption;
import static pellet.PelletCmdOptionArg.NONE;
import static pellet.PelletCmdOptionArg.REQUIRED;
import pellet.PelletCmdOptions;
import it.unife.endif.ml.bundle.explanation.BundleGlassBoxExplanation;
import it.unife.endif.ml.bundle.explanation.BundleHSTExplanationGenerator;
import it.unife.endif.ml.bundle.logging.BundleLoggerFactory;
import it.unife.endif.ml.bundle.monitor.BundleRendererExplanationProgressMonitor;
import it.unife.endif.ml.bundle.monitor.LogRendererTimeExplanationProgressMonitor;
import it.unife.endif.ml.bundle.monitor.ScreenRendererTimeExplanationProgressMonitor;
import it.unife.endif.ml.bundle.utilities.BundleUtilities;
import it.unife.endif.ml.bundle.bdd.BDDFactory2;
import it.unife.endif.ml.bundle.monitor.SilentRendererTimeExplanationProgressMonitor;
import it.unife.endif.ml.bundle.utilities.ExplanationResult;
import it.unife.endif.ml.bundle.core.ProbabilisticExplanationReasoner;
import it.unife.endif.ml.bundle.exception.OWLOntologyInconsistentException;
import it.unife.endif.ml.math.ApproxDouble;
import it.unife.endif.ml.bundle.exception.ObjectNotInitializedException;
import static it.unife.endif.ml.bundle.general.utilities.GeneralUtils.safe;
import it.unife.endif.ml.bundle.monitor.BundleRendererExplanationProgressMonitor2;
import it.unife.endif.ml.bundle.monitor.ConsoleTimeExplanationProgressMonitor;
import it.unife.endif.ml.bundle.monitor.LogTimeExplanationProgressMonitor;
import it.unife.endif.ml.bundle.monitor.NullTimeExplanationProgressMonitor;
import java.io.File;
import org.semanticweb.owl.explanation.api.ConsoleExplanationProgressMonitor;
import org.semanticweb.owl.explanation.api.Explanation;
import org.semanticweb.owl.explanation.api.ExplanationGenerator;
import org.semanticweb.owl.explanation.api.ExplanationGeneratorFactory;
import org.semanticweb.owl.explanation.api.ExplanationManager;
import org.semanticweb.owl.explanation.api.ExplanationProgressMonitor;
import org.semanticweb.owl.explanation.impl.blackbox.checker.InconsistentOntologyExplanationGeneratorFactory;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLOntologyIRIMapper;
import org.semanticweb.owlapi.model.OWLRuntimeException;
import org.semanticweb.owlapi.model.parameters.Imports;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.util.PriorityCollection;
/**
* Bundle's core class. Extends PelletCmdApp class from the Pellet code and
* allows the computation of the given queries.
*
* @author Riccardo Zese, Giuseppe Cota
*
*/
public class Bundle extends PelletCmdApp implements ProbabilisticExplanationReasoner {
// Converts an axiom into a class expression whose unsatisfiability proves the axiom.
private SatisfiabilityConverter converter;
private static final Logger logger = Logger.getLogger(Bundle.class.getName(), new BundleLoggerFactory());
/**
 * inferences for which there was an error while generating the explanation
 */
private int errorExpCount = 0;
// Upper bound on explanations generated per inference (option --max).
private int maxExplanations = Integer.MAX_VALUE;
// Reasoning timeout in milliseconds; 0 means unlimited (option --time).
private int reasoningTimeout = 0;
// true = black-box explanation generator, false = glass-box (option --method).
private boolean useBlackBox = false;
private ProgressMonitor monitor;
/**
 * inferences whose explanation contains more than one axiom
 */
private int multiAxiomExpCount = 0;
/**
 * inferences with multiple explanations
 */
private int multipleExpCount = 0;
private PelletReasoner reasoner;
// name1/name2/name3 hold the entities of the query parsed by loadNames()
// (e.g. subclass C,D -> name1=C, name2=D; property-value s,p,o -> all three).
private OWLEntity name1;
private OWLEntity name2;
private OWLObject name3;
// false when --noProb is given: skip the probability computation.
protected boolean findProbability;
// true when --log is given: write messages to the logger instead of stdout.
private boolean log = false;
/**
 * background ontology
 */
private OWLOntology rootOntology;
/**
 * ontology containing the probabilistic target axioms
 */
private String probOntologyName = null;
// Raw type: presumably maps probabilistic axioms to their probabilities
// (OWLAxiom -> ApproxDouble) — TODO confirm against setPMap()/computeProbability().
private Map pMap = null;
//private ApproxDouble[] prob = new ApproxDouble[0];
//private String[] axp = new String[0];
//private OWLAxiom[] axi = new OWLAxiom[0];
// it is necessary to be a list because it must keep a correspondence with the index of the
// variables of the built BDD
private List usedAxioms;
// Which BDD library implementation to use (option --bddfact); BuDDy by default.
private BDDFactoryType bddFType = BDDFactoryType.BUDDY;
private BDDFactory bddF;
// NOTE(review): presumably the number of decimal digits used by ApproxDouble — confirm.
private int accuracy = 5;
// Set by init(); guards computeQuery() against use before initialization.
private boolean initialized = false;
// Set together with verbose when -v/--verbose is parsed; enables progress monitors.
private boolean showAll = false;
// true when the owlexplanation library must be used instead of Pellet's generators.
private boolean useOWLExplanationLibrary;
/**
 * Sets up the class and the explanation generator.
 */
public Bundle() {
// NOTE(review): presumably installs BUNDLE's glass-box explanation hooks into Pellet — confirm.
BundleGlassBoxExplanation.setup();
// PelletOptions.USE_ANNOTATION_SUPPORT = true;
// PelletOptions.IGNORE_ANNOTATION_CLASSES = false;
}
/**
 * Returns the one-line description of this command-line application.
 *
 * @return the application identifier/description string
 */
@Override
public String getAppId() {
    final String description =
            "Bundle: Explains one or more inferences in a given ontology including ontology inconsistency";
    return description;
}
/**
 * Returns the usage string printed by the command-line help.
 *
 * @return the usage/synopsis text for the bundle command
 */
@Override
public String getAppCmd() {
    StringBuilder usage = new StringBuilder("bundle ");
    usage.append(getMandatoryOptions()).append("[options] ...\n\n");
    usage.append("The options --unsat, --all-unsat, --inconsistent, --subclass, \n");
    usage.append("--hierarchy, and --instance are mutually exclusive. By default \n ");
    usage.append("--inconsistent option is assumed. In the following descriptions \n");
    usage.append("C, D, and i can be URIs or local names.");
    return usage.toString();
}
/**
 * Sets up and returns the set of the arguments Bundle accepts.
 *
 * @return a PelletCmdOptions object containing all the arguments accepted by
 * Bundle
 */
@Override
public PelletCmdOptions getOptions() {
    PelletCmdOptions options = getGlobalOptions();
    options.add(getIgnoreImportsOption());

    PelletCmdOption option = argOption("unsat", "C", "Explain why the given class is unsatisfiable");
    options.add(option);

    options.add(flagOption("all-unsat", "Explain all unsatisfiable classes"));
    options.add(flagOption("inconsistent", "Explain why the ontology is inconsistent"));
    options.add(flagOption("hierarchy", "Print all explanations for the class hierarchy"));

    options.add(argOption("subclass", "C,D", "Explain why C is a subclass of D"));
    options.add(argOption("instance", "i,C", "Explain why i is an instance of C"));
    options.add(argOption("property-value", "s,p,o", "Explain why s has value o for property p"));

    option = argOption("method", "glass | black | owlexp",
            "Method that will be used to generate explanations");
    option.setShortOption("m");
    option.setDefaultValue("glass");
    options.add(option);

    option = argOption("max", "positive integer",
            "Maximum number of generated explanations for each inference");
    option.setShortOption("x");
    option.setDefaultValue(Integer.MAX_VALUE);
    options.add(option);

    // --time has no type string, so it is built without the argOption helper.
    option = new PelletCmdOption("time");
    option.setShortOption("t");
    option.setDescription("Maximum time allowed for the inference, 0 for unlimited time. Format: [Xh][Ym][Zs][Kms]");
    option.setDefaultValue("0s");
    option.setIsMandatory(false);
    option.setArg(REQUIRED);
    options.add(option);

    options.add(flagOption("noProb", "Disable the computation of the probability"));
    options.add(flagOption("log", "Write on log instead of on screen"));

    option = new PelletCmdOption("bddfact");
    option.setShortOption("bf");
    option.setDescription("Set the BDD Factory, possible values can be "
            + "\"buddy\", \"cudd\", \"j\", \"java\", \"jdd\", \"test\", "
            + "\"typed\", or a name of a class that has an init() method "
            + "that returns a BDDFactory. Note: \"cal\" is not supported, "
            + "use \"buddy\" for better performances. If the loading fails "
            + "I will use \"java\" factory.");
    option.setDefaultValue("buddy");
    option.setIsMandatory(false);
    option.setArg(REQUIRED);
    options.add(option);

    // Reuse Pellet's global --verbose option but give it a BUNDLE-specific description.
    option = options.getOption("verbose");
    option.setDescription("Print detailed exceptions and messages about the progress");
    return options;
}

/**
 * Builds an optional boolean flag option (no argument, default false).
 *
 * @param name the long option name
 * @param description the help text for the option
 * @return the configured option
 */
private static PelletCmdOption flagOption(String name, String description) {
    PelletCmdOption option = new PelletCmdOption(name);
    option.setDescription(description);
    option.setDefaultValue(false);
    option.setIsMandatory(false);
    option.setArg(NONE);
    return option;
}

/**
 * Builds an optional option that requires an argument of the given type.
 *
 * @param name the long option name
 * @param type the argument type/format shown in the help text
 * @param description the help text for the option
 * @return the configured option
 */
private static PelletCmdOption argOption(String name, String type, String description) {
    PelletCmdOption option = new PelletCmdOption(name);
    option.setType(type);
    option.setDescription(description);
    option.setIsMandatory(false);
    option.setArg(REQUIRED);
    return option;
}
/**
 * Parses the array containing the arguments and configures Bundle.
 *
 * @param args the array of arguments
 */
@Override
public void parseArgs(String[] args) {
    // PelletCmdApp reads the args array starting from position 1, so shift
    // everything right by one and insert a dummy application name in front.
    String[] argsNew = new String[args.length + 1];
    argsNew[0] = "BUNDLE";
    int argsNewIdx = 1;
    for (String arg : args) {
        if (!arg.equals("-v") && !arg.equals("--verbose")) {
            argsNew[argsNewIdx] = arg;
            argsNewIdx++;
        } else {
            // Consume the verbosity flags here instead of forwarding them to Pellet.
            verbose = true;
            showAll = true;
        }
    }
    // BUGFIX: when -v/--verbose was consumed, the shifted array used to keep
    // trailing null slots that were handed to the Pellet option parser.
    if (argsNewIdx < argsNew.length) {
        String[] trimmed = new String[argsNewIdx];
        System.arraycopy(argsNew, 0, trimmed, 0, argsNewIdx);
        argsNew = trimmed;
    }
    args = argsNew;
    super.parseArgs(args);

    String bddFactoryName = options.getOption("bddfact").getValueAsString();
    if (bddFactoryName.equals("cal")) {
        // "cal" is rejected: keep the default factory (BuDDy) instead.
        if (log) {
            logger.info("CAL is not supported as BDD Factory. I will use BuDDy!");
        } else {
            System.out.println("CAL is not supported as BDD Factory. I will use BuDDy!");
        }
    } else {
        setBddFType(BDDFactoryType.valueOf(bddFactoryName.toUpperCase()));
    }
    setMaxExplanations(options.getOption("max").getValueAsNonNegativeInteger());
    setReasoningTimeout(BundleUtilities.convertTimeValue(options.getOption("time").getValueAsString()));
    findProbability = !options.getOption("noProb").getValueAsBoolean();
    setLog(options.getOption("log").getValueAsBoolean());

    if (verbose) {
        if (getMaxExplanations() != Integer.MAX_VALUE) {
            if (log) {
                logger.info("Max Explanations: " + getMaxExplanations());
            } else {
                System.out.println("Max Explanations: " + getMaxExplanations());
            }
        }
        if (getReasoningTimeout() != 0) {
            // BUGFIX: this message used to be printed to the console even in log mode.
            if (log) {
                logger.info("Max Execution Time: " + options.getOption("time").getValue() + " (" + getReasoningTimeout() + ")");
            } else {
                System.out.println("Max Execution Time: " + options.getOption("time").getValue() + " (" + getReasoningTimeout() + ")");
            }
        }
        if (!findProbability) {
            // BUGFIX: this message used to be printed to the console even in log mode.
            if (log) {
                logger.info("No probability computation");
            } else {
                System.out.println("No probability computation");
            }
        }
    }
    // Only the first input file is loaded as the ontology; an optional second
    // file names the ontology containing the probabilistic axioms.
    String inputFiles = getInputFiles()[0];
    loadOntologies(inputFiles);
    if (getInputFiles().length > 1) {
        probOntologyName = getInputFiles()[1];
    }
    init();
}
/**
 * Loads the ontology given as argument.
 */
public void loadOntologies() {
// Delegates to the String overload; an empty string means
// "use the first input file from the command line".
loadOntologies("");
}
/**
 * Loads the ontology given as argument or as parameter of the method. If
 * the argument is null or empty, the method loads the ontology given by the
 * command-line arguments, otherwise it loads the given ontology.
 *
 * @param inputFiles null/empty, or the IRI of the ontology document
 * @throws RuntimeException if the ontology cannot be loaded
 */
public void loadOntologies(String inputFiles) {
    if (inputFiles == null || inputFiles.isEmpty()) {
        // Fall back to the first input file passed on the command line.
        inputFiles = getInputFiles()[0];
    }
    try {
        OWLOntology ontology = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(IRI.create(inputFiles));
        setRootOntology(ontology);
    } catch (OWLOntologyCreationException e) {
        // BUGFIX: preserve the cause so the original stack trace is not lost.
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Sets the given ontology as root ontology and (re)creates the
 * satisfiability converter and the Pellet reasoner for it.
 *
 * @param baseOntology the ontology to use as root ontology
 * @deprecated use
 * {@link #setRootOntology(org.semanticweb.owlapi.model.OWLOntology)}
 * instead.
 */
@Deprecated
public void loadOntologies(OWLOntology baseOntology) {
    this.rootOntology = baseOntology;
    OWLOntologyManager ontologyManager = baseOntology.getOWLOntologyManager();
    converter = new SatisfiabilityConverter(ontologyManager.getOWLDataFactory());
    reasoner = new PelletReasonerFactory().createNonBufferingReasoner(baseOntology);
}
// protected KnowledgeBase getKB(KBLoader loader, int file) {
// try {
// String[] inputFiles = getInputFiles();
//
// verbose("There are " + 1 + " input files:");
// verbose(inputFiles[file]);
//
//
// startTask("loading");
// KnowledgeBase kb = loader.createKB(inputFiles[file]);
// finishTask("loading");
//
// if (verbose) {
// StringBuilder sb = new StringBuilder();
// sb.append("Classes = " + kb.getAllClasses().size() + ", ");
// sb.append("Properties = " + kb.getProperties().size() + ", ");
// sb.append("Individuals = " + kb.getIndividuals().size());
// verbose("Input size: " + sb);
//
// verbose("Expressivity: " + kb.getExpressivity());
// }
//
// return kb;
// } catch (RuntimeException e) {
// throw new PelletCmdException(e);
// }
// }
/**
 * Loads the method used for computing explanations (option --method).
 *
 * @throws PelletCmdException if the method name is not one of
 * glass/black/owlexp
 */
private void loadMethod() {
    String method = options.getOption("method").getValueAsString();
    // NOTE(review): DisjointUnion axioms force the owlexplanation library —
    // presumably they are not supported by the Pellet generators; confirm.
    boolean containsDUAxs = !rootOntology.getAxioms(AxiomType.DISJOINT_UNION, Imports.INCLUDED).isEmpty();
    if (method.equalsIgnoreCase("owlexp") || containsDUAxs) {
        useOWLExplanationLibrary = true;
    } else if (method.equalsIgnoreCase("black")) {
        useBlackBox = true;
    } else if (method.equalsIgnoreCase("glass")) {
        useBlackBox = false;
    } else {
        throw new PelletCmdException("Unrecognized method: " + method);
    }
}
/**
 * Processes the arguments specifying the query and stores the query
 * entities in {@code name1}, {@code name2} and {@code name3}.
 *
 * The options are checked in priority order: hierarchy, all-unsat,
 * inconsistent, unsat, subclass, instance, property-value. If none is
 * given, the ontology-inconsistency query is assumed.
 *
 * @throws PelletCmdException if an option value is malformed or refers to
 * an entity that is not defined in the ontology
 */
private void loadNames() {
    PelletCmdOption option;
    name1 = name2 = null;
    name3 = null;
    // --hierarchy: all names stay null.
    if ((option = options.getOption("hierarchy")) != null) {
        if (option.getValueAsBoolean()) {
            return;
        }
    }
    // --all-unsat: encoded as name1 = owl:Nothing.
    if ((option = options.getOption("all-unsat")) != null) {
        if (option.getValueAsBoolean()) {
            name1 = OWL.Nothing;
            return;
        }
    }
    // --inconsistent: encoded as name1 = owl:Thing.
    if ((option = options.getOption("inconsistent")) != null) {
        if (option.getValueAsBoolean()) {
            name1 = OWL.Thing;
            return;
        }
    }
    // --unsat C
    if ((option = options.getOption("unsat")) != null) {
        String unsatisfiable = option.getValueAsString();
        if (unsatisfiable != null) {
            name1 = OntologyUtils.findEntity(unsatisfiable, rootOntology);
            if (name1 == null) {
                throw new PelletCmdException("Undefined entity: " + unsatisfiable);
            } else if (!name1.isOWLClass()) {
                throw new PelletCmdException("Not a defined class: " + unsatisfiable);
            } else if (name1.isTopEntity() && useBlackBox) {
                throw new PelletCmdException("Black box method cannot be used to explain unsatisfiability of owl:Thing");
            }
            return;
        }
    }
    // --subclass C,D
    if ((option = options.getOption("subclass")) != null) {
        String subclass = option.getValueAsString();
        if (subclass != null) {
            String[] names = subclass.split(",");
            if (names.length != 2) {
                throw new PelletCmdException(
                        "Invalid format for subclass option: " + subclass);
            }
            name1 = OntologyUtils.findEntity(names[0], rootOntology);
            name2 = OntologyUtils.findEntity(names[1], rootOntology);
            if (name1 == null) {
                throw new PelletCmdException("Undefined entity: " + names[0]);
            } else if (!name1.isOWLClass()) {
                throw new PelletCmdException("Not a defined class: " + names[0]);
            }
            if (name2 == null) {
                throw new PelletCmdException("Undefined entity: " + names[1]);
            } else if (!name2.isOWLClass()) {
                throw new PelletCmdException("Not a defined class: " + names[1]);
            }
            return;
        }
    }
    // --instance i,C
    if ((option = options.getOption("instance")) != null) {
        String instance = option.getValueAsString();
        if (instance != null) {
            String[] names = instance.split(",");
            if (names.length != 2) {
                throw new PelletCmdException("Invalid format for instance option: " + instance);
            }
            name1 = OntologyUtils.findEntity(names[0], rootOntology);
            name2 = OntologyUtils.findEntity(names[1], rootOntology);
            if (name1 == null) {
                throw new PelletCmdException("Undefined entity: " + names[0]);
            } else if (!name1.isOWLNamedIndividual()) {
                throw new PelletCmdException("Not a defined individual: " + names[0]);
            }
            if (name2 == null) {
                throw new PelletCmdException("Undefined entity: " + names[1]);
            } else if (!name2.isOWLClass()) {
                throw new PelletCmdException("Not a defined class: " + names[1]);
            }
            return;
        }
    }
    // --property-value s,p,o
    if ((option = options.getOption("property-value")) != null) {
        String optionValue = option.getValueAsString();
        if (optionValue != null) {
            String[] names = optionValue.split(",");
            if (names.length != 3) {
                throw new PelletCmdException("Invalid format for property-value option: " + optionValue);
            }
            name1 = OntologyUtils.findEntity(names[0], rootOntology);
            name2 = OntologyUtils.findEntity(names[1], rootOntology);
            if (name1 == null) {
                throw new PelletCmdException("Undefined entity: " + names[0]);
            } else if (!name1.isOWLNamedIndividual()) {
                throw new PelletCmdException("Not an individual: " + names[0]);
            }
            if (name2 == null) {
                throw new PelletCmdException("Undefined entity: " + names[1]);
            } else if (!name2.isOWLObjectProperty() && !name2.isOWLDataProperty()) {
                throw new PelletCmdException("Not a defined property: " + names[1]);
            }
            if (name2.isOWLObjectProperty()) {
                // Object property: the object must be a defined individual.
                name3 = OntologyUtils.findEntity(names[2], rootOntology);
                if (name3 == null) {
                    throw new PelletCmdException("Undefined entity: " + names[2]);
                } else if (!(name3 instanceof OWLIndividual)) {
                    throw new PelletCmdException("Not a defined individual: " + names[2]);
                }
            } else {
                // Data property: parse the object as a literal in Manchester syntax.
                ManchesterOWLSyntaxParser parser = OWLManager.createManchesterParser();
                // BUGFIX: the parser must be given the literal text before
                // parsing; parseLiteral used to be invoked with no input set.
                parser.setStringToParse(names[2]);
                try {
                    name3 = parser.parseLiteral(null);
                } catch (ParserException e) {
                    throw new PelletCmdException("Not a valid literal: " + names[2]);
                }
            }
            return;
        }
    }
    // By default we explain why the ontology is inconsistent.
    if (name1 == null) {
        name1 = OWL.Thing;
        options.getOption("inconsistent").setValue(true);
    }
}
/**
 * Instantiates and returns the explanation generator.
 *
 * The black-box generator is used only when requested AND the query is not
 * an inconsistency explanation; otherwise the BUNDLE glass-box generator is
 * returned (with a warning if black-box had been requested).
 *
 * @return the explanation generator
 */
private TransactionAwareSingleExpGen getSingleExplanationGenerator() {
    PelletCmdOption inconsistentOption = options.getOption("inconsistent");
    boolean explainingInconsistency =
            inconsistentOption != null && inconsistentOption.getValueAsBoolean();
    if (useBlackBox && !explainingInconsistency) {
        return new BlackBoxExplanation(reasoner.getRootOntology(), PelletReasonerFactory.getInstance(), reasoner);
    }
    if (useBlackBox) {
        // Black-box was requested but cannot explain inconsistency.
        output("WARNING: black method cannot be used to explain inconsistency. Switching to glass.");
    }
    return new BundleGlassBoxExplanation(reasoner);
}
/**
 * Prints useful statistics if in verbose mode.
 *
 * Uses the counters accumulated during explanation generation
 * (multipleExpCount, multiAxiomExpCount, errorExpCount) and the
 * "explain" timer.
 */
private void printStatistics() {
if (!verbose) {
return;
}
Timer timer = timers.getTimer("explain");
if (timer != null) {
// TODO change with logger????
// NOTE(review): the first two verbose() calls omit the `log` argument
// while the rest pass it — presumably a different overload; confirm
// the header lines also reach the log when --log is set.
verbose("");
verbose("========== Statistics ==========");
//verbose("Subclass relations : " + timer.getCount(), log);
verbose("Multiple explanations: " + multipleExpCount, log);
verbose("Single explanation ", log);
verbose(" with multiple axioms: " + multiAxiomExpCount, log);
verbose("Error explaining : " + errorExpCount, log);
verbose("Average time : " + timer.getAverage() + "ms", log);
// if (verbose) {
// StringWriter sw = new StringWriter();
// timers.print(sw, true, null);
//
// verbose("", log);
// verbose("Timer summary:", log);
// String[] lines = sw.toString().split("\n");
// for (String line : lines) {
// verbose(line, log);
// }
// }
}
}
/**
 * Disposes Bundle, also clearing the content of the pMap
 * (see {@link #finish(boolean)}).
 */
public void dispose() {
finish(true);
}
/**
 * Disposes Bundle maintaining the content of the pMap.
 */
@Override
public void finish() {
// Delegates to finish(boolean) without clearing the probability map.
finish(false);
}
/**
 * Disposes Bundle.
 *
 * Releases the reasoner and all cached state. Safe to call more than once.
 *
 * @param clearPMap true for cleaning the pMap, false otherwise
 */
public void finish(boolean clearPMap) {
    super.finish();
    initialized = false;
    monitor = null;
    loader = null;
    // BUGFIX: guard against a second finish() call (reasoner already null).
    if (reasoner != null) {
        reasoner.dispose();
        reasoner = null;
    }
    // BUGFIX: pMap may already be null on a repeated finish() call.
    if (pMap != null) {
        if (clearPMap) {
            pMap.clear();
        }
        pMap = null;
    }
    usedAxioms = null;
    timers = null;
    PelletOptions.USE_TRACING = false;
}
/**
* Resets Bundle.
*/
// public void reset() {
//// prob = new ApproxDouble[0];
//// axp = new String[0];
//// axi = new OWLAxiom[0];
// usedAxioms = new ArrayList<>();
// if (reasoner != null) {
// reasoner.dispose();
// reasoner = null;
// }
// if (converter != null) {
// converter = null;
// }
// loadOntologies();
// this.initialized = false;
// }
/**
 * Check whether BUNDLE is initialized
 *
 * @return true if BUNDLE is initialized, false otherwise
 */
public boolean isInitialized() {
// Set by init(), reset by finish(boolean).
return initialized;
}
/**
 * Computes the query given by the arguments.
 *
 * The query type is encoded in name1/name2/name3 as set by loadNames():
 * all null = hierarchy; name1 = owl:Nothing = all-unsat; name1 only =
 * inconsistent/unsat; name1+name2 = subclass or instance; all three =
 * property-value.
 *
 * @return the result of the computation contained in a QueryResult object,
 * or null if BUNDLE is not initialized
 */
public QueryResult computeQuery() {
    QueryResult result = null;
    // NOTE: generic types restored — the extracted source had them stripped.
    Set<Set<OWLAxiom>> explanations = Collections.emptySet();
    if (!initialized) {
        // BUGFIX: message wording ("correctly not" -> "not correctly"),
        // consistent with computeQuery(OWLAxiom).
        error("BUNDLE is not correctly initialized.");
    } else {
        try {
            if (name1 == null) {
                // Option --hierarchy
                verbose("Explain all the subclass relations in the ontology");
                explanations = explainClassHierarchy();
            } else if (name2 == null) {
                if (((OWLClassExpression) name1).isOWLNothing()) {
                    // Option --all-unsat
                    verbose("Explain all the unsatisfiable classes");
                    explanations = explainUnsatisfiableClasses();
                } else {
                    // Options --inconsistent and --unsat C
                    verbose("Explain unsatisfiability of " + name1);
                    explanations = explainUnsatisfiableClass((OWLClass) name1);
                }
            } else if (name3 != null) {
                // Option --property-value s,p,o
                verbose("Explain property assertion " + name1 + " and " + name2 + " and " + name3);
                explanations = explainPropertyValue((OWLIndividual) name1, (OWLProperty) name2, name3);
            } else if (name1.isOWLClass() && name2.isOWLClass()) {
                // Option --subclass C,D
                verbose("Explain subclass relation between " + name1 + " and " + name2);
                explanations = explainSubClass((OWLClass) name1, (OWLClass) name2);
            } else if (name1.isOWLNamedIndividual() && name2.isOWLClass()) {
                // Option --instance i,C
                verbose("Explain instance relation between " + name1 + " and " + name2);
                explanations = explainInstance((OWLIndividual) name1, (OWLClass) name2);
            }
        } catch (OWLException e) {
            // Release the native BDD factory before propagating the failure.
            if (findProbability && getBddF() != null) {
                getBddF().done();
            }
            throw new RuntimeException(e);
        }
        result = computeProbability(explanations);
        printResult(result);
        if (verbose) {
            printStatistics();
        }
    }
    return result;
}
/**
 * Computes the given query.
 *
 * @param query the query to compute
 * @return the result of the computation contained in a QueryResult object
 * @throws org.semanticweb.owlapi.model.OWLException
 * @throws ObjectNotInitializedException if BUNDLE was not initialized
 */
@Override
public QueryResult computeQuery(OWLAxiom query) throws OWLException {
    if (!initialized) {
        String msg = "BUNDLE is not correctly initialized.";
        error(msg);
        throw new ObjectNotInitializedException(msg);
    }
    // Generate the explanations for the axiom, then fold them into a probability.
    ExplanationResult explanations = explainAxiom(query);
    return computeProbability(explanations.getExplanations());
}
/**
 * Computes the "all unsatisfiable classes" query.
 *
 * @return the result of the computation contained in a QueryResult object
 */
public QueryResult computeAllUnsatQuery() {
    // computeQuery() interprets name1 == owl:Nothing as the all-unsat query.
    name1 = OWL.Nothing;
    return computeQuery();
}
/**
 * Computes the class-hierarchy query.
 *
 * @return the result of the computation contained in a QueryResult object
 */
public QueryResult computeHierarchyQuery() {
    // NOTE(review): relies on name1 being null so that computeQuery()
    // selects the hierarchy branch — confirm callers do not set name1 first.
    return computeQuery();
}
/**
* Executes a class hierarchy query.
*
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
private Set> explainClassHierarchy() throws OWLException {
Set visited = new HashSet<>();
// reasoner.flush();
startTask("Classification");
reasoner.getKB().classify();
finishTask("Classification");
startTask("Realization");
reasoner.getKB().realize();
finishTask("Realization");
monitor = new ConsoleProgressMonitor();
monitor.setProgressTitle("Explaining");
monitor.setProgressLength(reasoner.getRootOntology().getClassesInSignature().size());
monitor.taskStarted();
Node bottoms = reasoner.getEquivalentClasses(OWL.Nothing);
Set> topsResults = explainClassHierarchy(OWL.Nothing, bottoms, visited);
Node tops = reasoner.getEquivalentClasses(OWL.Thing);
Set> bottomsResults = explainClassHierarchy(OWL.Thing, tops, visited);
monitor.taskFinished();
topsResults.addAll(bottomsResults);
bottomsResults = null; //cleaning
return topsResults;
}
/**
* Computes the explanations for the hierarchy (hierarchy). It starts from
* cls and visits all the classes contained in eqClasses
*
* @param cls starting class
* @param eqClasses Node containing the classes to check
* @param visited the set of visited classes
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
private Set> explainClassHierarchy(OWLClass cls, Node eqClasses, Set visited)
throws OWLException {
Set> explanations = new HashSet<>();
if (visited.contains(cls)) {
return Collections.emptySet();
}
visited.add(cls);
visited.addAll(eqClasses.getEntities());
for (OWLClass eqClass : eqClasses) {
monitor.incrementProgress();
explanations.addAll(explainEquivalentClass(cls, eqClass));
}
for (OWLNamedIndividual ind : reasoner.getInstances(cls, true).getFlattened()) {
explanations.addAll(explainInstance(ind, cls));
}
NodeSet subClasses = reasoner.getSubClasses(cls, true);
Map> subClassEqs = new HashMap<>();
for (Node equivalenceSet : subClasses) {
if (equivalenceSet.isBottomNode()) {
continue;
}
OWLClass subClass = equivalenceSet.getRepresentativeElement();
subClassEqs.put(subClass, equivalenceSet);
explanations.addAll(explainSubClass(subClass, cls));
}
for (Map.Entry> entry : subClassEqs.entrySet()) {
explanations.addAll(explainClassHierarchy(entry.getKey(), entry.getValue(), visited));
}
return explanations;
}
/**
* Searches for unsatisfiable classes (all-unsat). Executes a unsatisfiable
* class query for every class of the ontology
*
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
private Set> explainUnsatisfiableClasses() throws OWLException {
Set> explanations = new HashSet<>();
for (OWLClass cls : reasoner.getEquivalentClasses(OWL.Nothing)) {
if (cls.isOWLNothing()) {
continue;
}
explanations.addAll(explainUnsatisfiableClass(cls));
}
return explanations;
}
/**
* Computes an unsatisfiable class query (unsat).
*
* @param owlClass the class to prove the unsatisfiability
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
private Set> explainUnsatisfiableClass(OWLClass owlClass) throws OWLException {
return explainSubClass(owlClass, OWL.Nothing);
}
/**
* Computes a property value query (property-value).
*
* @param s the subject of a property assertion
* @param p the property of the assertion
* @param o the object of the property assertion
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
// In the following method(s) we intentionally do not use OWLPropertyExpression,?>
// because of a bug in some Sun's implementation of javac
// http://bugs.sun.com/view_bug.do?bug_id=6548436
// Since lack of generic type generates a warning, we suppress it
@SuppressWarnings("unchecked")
private Set> explainPropertyValue(OWLIndividual s, OWLProperty p, OWLObject o) throws OWLException {
if (p.isOWLObjectProperty()) {
return explainAxiom(OWL.propertyAssertion(s, (OWLObjectProperty) p, (OWLIndividual) o)).getExplanations();
} else {
return explainAxiom(OWL.propertyAssertion(s, (OWLDataProperty) p, (OWLLiteral) o)).getExplanations();
}
}
/**
* Computes a "subclass of" query (subclass).
*
* @param sub the subclass of the "subclass of" axiom
* @param sup the super class of the "subclass of" axiom
* @return the result of the computation contained in a QueryResult object
* @throws OWLException
*/
private Set> explainSubClass(OWLClass sub, OWLClass sup) throws OWLException {
OWLSubClassOfAxiom axiom = OWL.subClassOf(sub, sup);
if (sub.equals(sup)) {
return Collections.emptySet();
}
if (sub.isOWLNothing()) {
return Collections.emptySet();
}
if (sup.isOWLThing()) {
return Collections.emptySet();
}
return explainAxiom(axiom).getExplanations();
}
/**
* Computes an "instance of" query (instance).
*
* @param owlIndividual the individual of the class assertion axiom
* @param owlClass the class of the class assertion axiom
* @return the set of explanations
* @throws OWLException
*/
private Set> explainInstance(OWLIndividual owlIndividual, OWLClass owlClass) throws OWLException {
if (owlClass.isOWLThing()) {
return Collections.emptySet();
}
OWLAxiom axiom = OWL.classAssertion(owlIndividual, owlClass);
return explainAxiom(axiom).getExplanations();
}
/**
* Computes an "equivalent of " query. NOTE: a equivalent-of b corresponds
* to a subclass-of b AND b subclass-of a
*
* @param c1 the first class of the equivalent classes axiom
* @param c2 the second class of the equivalent classes axiom
* @return the set of explanations
* @throws OWLException
*/
private Set> explainEquivalentClass(OWLClass c1, OWLClass c2) throws OWLException {
if (c1.equals(c2)) {
return Collections.emptySet();
}
OWLAxiom axiom = OWL.equivalentClasses(c1, c2);
return explainAxiom(axiom).getExplanations();
}
/**
 * Computes the explanations of a generic axiom. The work is delegated
 * either to the owlexplanation library (when
 * {@code useOWLExplanationLibrary} is set) or to Pellet's hitting-set-tree
 * based multiple-explanation generator. Also updates the bookkeeping
 * counters (errorExpCount, multiAxiomExpCount, multipleExpCount).
 *
 * @param axiom the axiom whose explanations must be computed
 * @return an ExplanationResult wrapping the axiom, its explanations and
 *         the timers
 * @throws OWLException if the underlying reasoner fails
 */
public ExplanationResult explainAxiom(OWLAxiom axiom) throws OWLException {
    Timer timer = timers.startTimer("explain");
    Set<Set<OWLAxiom>> explanations = new HashSet<>();
    if (useOWLExplanationLibrary) {
        // --- Generation through the owlexplanation library ---
        // Choose the progress monitor: logging/console when showAll is on,
        // a silent one otherwise.
        BundleRendererExplanationProgressMonitor2<OWLAxiom> rendererMonitor;
        if (showAll) {
            if (log) {
                rendererMonitor = new LogTimeExplanationProgressMonitor(getReasoningTimeout());
            } else {
                rendererMonitor = new ConsoleTimeExplanationProgressMonitor(getReasoningTimeout());
            }
        } else {
            rendererMonitor = new NullTimeExplanationProgressMonitor<>();
        }
        // Create the explanation generator factory which uses reasoners
        // provided by the specified reasoner factory.
        ExplanationGeneratorFactory<OWLAxiom> expGen;
        if (options.getOption("inconsistent").getValueAsBoolean()) {
            expGen = new InconsistentOntologyExplanationGeneratorFactory(PelletReasonerFactory.getInstance(), reasoningTimeout);
        } else {
            expGen = ExplanationManager.createExplanationGeneratorFactory(PelletReasonerFactory.getInstance());
        }
        // Start the timeout.
        rendererMonitor.setParamAndStart(getReasoningTimeout());
        // Now create the actual explanation generator for our ontology.
        ExplanationGenerator<OWLAxiom> gen = expGen.createExplanationGenerator(rootOntology, rendererMonitor);
        // Reduce each Explanation object to its plain set of axioms.
        Set<Explanation<OWLAxiom>> expl = gen.getExplanations(axiom, maxExplanations);
        for (Explanation<OWLAxiom> ex : expl) {
            explanations.add(ex.getAxioms());
        }
        rendererMonitor.stopMonitoring();
        if (explanations.isEmpty()) {
            rendererMonitor.foundNoExplanations(axiom);
        }
    } else {
        // --- Generation through Pellet's HST algorithm ---
        BundleRendererExplanationProgressMonitor rendererMonitor;
        if (showAll) {
            if (log) {
                rendererMonitor = new LogRendererTimeExplanationProgressMonitor(axiom);
            } else {
                rendererMonitor = new ScreenRendererTimeExplanationProgressMonitor(axiom);
            }
        } else {
            rendererMonitor = new SilentRendererTimeExplanationProgressMonitor();
        }
        MultipleExplanationGenerator expGen = new BundleHSTExplanationGenerator(getSingleExplanationGenerator());
        expGen.setProgressMonitor(rendererMonitor);
        // The axiom is converted into an equivalent class whose
        // unsatisfiability is then explained.
        OWLClassExpression unsatClass = converter.convert(axiom);
        rendererMonitor.setParamAndStart(getReasoningTimeout());
        explanations = expGen.getExplanations(unsatClass, getMaxExplanations());
        // Dispose the explanation generator.
        expGen.dispose();
        rendererMonitor.stopMonitoring();
        if (explanations.isEmpty()) {
            rendererMonitor.foundNoExplanations();
        }
    }
    // Update statistics about the kind of result obtained.
    int expSize = explanations.size();
    if (expSize == 0) {
        errorExpCount++;
    } else if (expSize == 1) {
        if (explanations.iterator().next().size() > 1) {
            multiAxiomExpCount++;
        }
    } else {
        multipleExpCount++;
    }
    timer.stop();
    // NOTE(review): mainTimer is stopped here but started elsewhere
    // (e.g. computeProbability) — preserved as in the original flow.
    timers.mainTimer.stop();
    return new ExplanationResult(axiom, explanations, timers);
}
/**
 * Creates the pMap (axiom -&gt; probability) from the probabilistic ontology.
 * If a separate probabilistic ontology was set via
 * {@link #setProbOntologyName(String)} it is loaded from that document,
 * otherwise the root ontology itself is used. Does nothing if BUNDLE is
 * already initialized or a pMap was already injected.
 *
 * @throws NumberFormatException if a probability annotation cannot be parsed
 */
private void buildPMap() throws NumberFormatException {
    if (this.initialized || pMap != null) {
        // Already initialized, or a pMap was injected via setpMap().
        return;
    }
    OWLOntology ontologyprob;
    OWLOntologyManager managerprob = null;
    if (probOntologyName != null) {
        managerprob = OWLManager.createOWLOntologyManager();
        try {
            ontologyprob = managerprob.loadOntologyFromOntologyDocument(IRI.create(probOntologyName));
        } catch (OWLOntologyCreationException ex) {
            // Loading failed: report it instead of silently swallowing the
            // error, then fall back to the root ontology.
            error("Unable to load the probabilistic ontology \"" + probOntologyName
                    + "\": " + ex.getMessage() + ". Falling back to the root ontology.");
            ontologyprob = rootOntology;
        }
    } else {
        ontologyprob = rootOntology;
    }
    // TreeSet gives a deterministic axiom ordering — presumably so repeated
    // runs produce the same pMap/variable order (TODO confirm).
    SortedSet<OWLAxiom> axioms = new TreeSet<>(ontologyprob.getAxioms());
    pMap = BundleUtilities.createPMap(axioms, showAll, false, false, accuracy, null);
    // Release the separately loaded ontology; skip the fallback case where
    // ontologyprob is actually the root ontology of another manager.
    if (probOntologyName != null && ontologyprob != rootOntology) {
        managerprob.removeOntology(ontologyprob);
    }
}
// public void init() {
// init(false);
// }
/**
 * Initializes BUNDLE: sets up the timers and the ApproxDouble accuracy,
 * loads the probability map (when probability computation is requested),
 * then loads the reasoning method and the query parameters.
 *
 * @throws ExceptionInInitializerError if the ontology (or the pMap, when
 *         probability computation is still required) was not loaded
 */
@Override
public void init() {
    if (timers == null) {
        timers = new Timers();
    }
    Timer timerInit = timers.startTimer("init");
    if (!ApproxDouble.isAccuracySet()) {
        ApproxDouble.setAccuracy(accuracy);
    }
    usedAxioms = new ArrayList<>();
    if (findProbability) {
        verbose("\n\nLoading probabilities...");
        buildPMap();
        if (pMap == null || pMap.isEmpty()) {
            // Without probabilistic axioms, downgrade to a plain query.
            error("Probabilistic axioms not loaded in initialization: probability will be not computed.");
            findProbability = false;
        }
    }
    // Load reasoning method.
    loadMethod();
    // Load the parameters for the query.
    loadNames();
    if ((findProbability && pMap == null) || rootOntology == null) {
        throw new ExceptionInInitializerError("Error in initialization: Ontology file not loaded");
    }
    initialized = true;
    timerInit.stop();
}
/**
 * Injects a precomputed probability map (axiom -&gt; probability).
 *
 * @param pMap the probability map to use
 */
@Override
public void setpMap(Map<OWLAxiom, ApproxDouble> pMap) {
    this.pMap = pMap;
}
/**
* Computes the probability of a set of explanations contained in a
* QueryResult object. After the computation of the probability, the given
* QueryResult object will update
*
* @param results a QueryResult object containing the set of explanations of
* which the method will compute the probability
*/
private QueryResult computeProbability(Set> explanations) {
timers.mainTimer.start();
QueryResult result = new QueryResult();
if (findProbability && pMap != null) {
Timer timerBDD = timers.startTimer("BDDCalc");
// VarAxAnn composed by two (three) arrays:
// prob (probability of axioms)
// axi (OWLAxiom)
// (axp (translations of axioms into strings))
setBddF();
BDD bdd = buildBDD(explanations);
ApproxDouble pq = probabilityOfBDD(bdd, new HashMap());
timerBDD.stop();
if ((pq.compareTo(ApproxDouble.one()) < 0)
&& (//BlockedIndividualSet.isApproximate() ||
getMaxExplanations() == explanations.size())) {
if (log) {
logger.warn("WARNING! The value of the probability may be a lower bound.");
} else {
System.out.println("WARNING! The value of the probability may be a lower bound.");
}
}
result.setBDD(bdd);
result.setQueryProbability(pq);
} else {
Timer timerBDD = timers.startTimer("BDDCalc");
setBddF();
timerBDD.stop();
result.setBDD(getBddF().one());
result.setQueryProbability(ApproxDouble.one());
}
// results.setProbAxioms(Arrays.asList(axi));
// results.setProbOfAxioms(Arrays.asList(prob));
result.setExplanations(explanations);
result.setProbAxioms(usedAxioms);
timers.mainTimer.stop();
result.setTimers(this.timers);
return result;
}
/**
* Build the BDD corresponding to the set of explanations given
*
* @param explanations the set of explanations for which the method
* calculates the corresponding BDD
* @return the resulting BDD
*/
private BDD buildBDD(Set> explanations) {
return BundleUtilities.buildBDD(explanations, bddF, pMap, usedAxioms);
}
/**
 * Computes the probability of the given BDD. The computation is recursive
 * over the BDD nodes; the {@code nodes} map memoizes the probability of
 * already visited nodes so they are not recomputed.
 *
 * @param node the current BDD node
 * @param nodes memo map from already visited nodes to their probability
 * @return the probability of the BDD rooted at {@code node}
 */
private ApproxDouble probabilityOfBDD(BDD node, Map<BDD, ApproxDouble> nodes) {
    return BundleUtilities.probabilityOfBDD(node, nodes, pMap, usedAxioms);
}
/**
 * Looks up the given BDD in the memo map of already visited nodes.
 *
 * @param node the current BDD node
 * @param nodes memo map from already visited nodes to their probability
 * @return null if not yet visited, or the probability previously computed
 */
private ApproxDouble getValue(BDD node, Map<BDD, ApproxDouble> nodes) {
    // Map.get already returns null for a missing key, so the explicit
    // containsKey check is redundant.
    return nodes.get(node);
}
/**
 * Records a newly visited node in the memo map.
 * (Name kept as {@code add_node} for compatibility with existing callers.)
 *
 * @param node the new node (BDD) to be added
 * @param pt the probability of the node
 * @param nodes memo map from already visited nodes to their probability
 */
private void add_node(BDD node, ApproxDouble pt, Map<BDD, ApproxDouble> nodes) {
    nodes.put(node, pt);
}
/**
* **********************************************************************
*/
// private void printCalculateBDD(Set exp, OWLAxiom ax, String probType, String axType) {
// /*
// System.out.println("\n\nSPIEGAZIONE ANNULLATA!\n");
// for (OWLAxiom axiom : exp){
//
// String s1name = Utilities.getManchesterSyntaxString(axiom);
// System.out.println(" " + s1name);
// if (axiom.getAxiomType().toString().equals("SubClassOf") ||
// axiom.getAxiomType().toString().equals("TransitiveObjectProperty") ||
// axiom.getAxiomType().toString().equals("SubObjectPropertyOf") ||
// axiom.getAxiomType().toString().equals("SubPropertyChain") ||
// axiom.getAxiomType().toString().equals("SubDataPropertyOf")){
// System.out.println(" "+ ((OWLIndAxiom)axiom).getSubjName());
// if (((OWLIndAxiom)axiom).equalsStrict(ax)){
// System.out.println(" <= CAUSATO DA QUESTO ASSIOMA\n"+
// " Prob\t : Assioma\n" +
// " " + probType + "\t : " + axType);
// }
// } else if (axiom.equals(ax)){
// System.out.println(" <= CAUSATO DA QUESTO ASSIOMA\n"+
// " Prob\t : Assioma\n" +
// " " + probType + "\t : " + axType);
// }
//
// }
// */
// throw new UnsupportedOperationException("Useless method.");
// }
/**
 * Not supported: BUNDLE is not meant to be executed through this
 * entry point; use the query/explain methods instead.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void run() {
    throw new UnsupportedOperationException("Useless method.");
}
/**
 * Returns the maximum number of explanations to compute per query.
 *
 * @return the maxExplanations value
 */
@Override
public int getMaxExplanations() {
    return this.maxExplanations;
}
/**
 * Sets the maximum number of explanations to compute per query.
 * Ignored (with a warning) once BUNDLE has been initialized.
 *
 * @param maxExplanations the maxExplanations to set
 */
@Override
public void setMaxExplanations(int maxExplanations) {
    if (this.initialized) {
        logger.warn("Cannot set max number of explanations. Cause: Bundle has been already initialized");
        return;
    }
    this.maxExplanations = maxExplanations;
}
/**
 * Returns the maximum time allowed for execution (in milliseconds).
 *
 * @return the reasoning timeout
 */
public int getReasoningTimeout() {
    return this.reasoningTimeout;
}
/**
 * Sets the maximum time allowed for execution, in milliseconds.
 * Silently ignored once BUNDLE has been initialized.
 *
 * @param reasoningTimeout the timeout to set (in milliseconds)
 */
public void setReasoningTimeout(int reasoningTimeout) {
    if (this.initialized) {
        return;
    }
    this.reasoningTimeout = reasoningTimeout;
}
/**
 * Sets the maximum time allowed for execution from its string
 * representation. Silently ignored once BUNDLE has been initialized.
 *
 * @param maxTime the string of the maximum time for execution to set
 */
public void setMaxTime(String maxTime) {
    if (this.initialized) {
        return;
    }
    this.reasoningTimeout = BundleUtilities.convertTimeValue(maxTime);
}
/**
 * Returns the BDD factory in use (may be null before {@link #setBddF()}
 * has run).
 *
 * @return the BDD factory
 */
public BDDFactory getBddF() {
    return this.bddF;
}
/**
 * Sets the BDD factory to use. Silently ignored once BUNDLE has been
 * initialized.
 *
 * @param bddF the BDD factory to set
 */
public void setBddF(BDDFactory bddF) {
    if (this.initialized) {
        return;
    }
    this.bddF = bddF;
}
/**
 * Lazily initializes the BDD factory using the configured factory type
 * (100 initial nodes, 10000 cache entries). Does nothing if a factory
 * already exists.
 */
public void setBddF() {
    if (this.bddF != null) {
        return;
    }
    this.bddF = BDDFactory2.init(bddFType.toString().toLowerCase(),
            100, 10000);
    // Presumably BDDFactory2 can fall back to the pure-Java factory when a
    // native one is unavailable — keep bddFType consistent with the
    // factory actually obtained (TODO confirm against BDDFactory2).
    if (bddF instanceof JFactory) {
        bddFType = BDDFactoryType.J;
    }
}
/**
 * Sets the name (document IRI) of the probabilistic ontology used by
 * {@code buildPMap}. Silently ignored once BUNDLE has been initialized.
 *
 * @param probOntology the probabilistic ontology name to set
 */
public void setProbOntologyName(String probOntology) {
    if (this.initialized) {
        return;
    }
    this.probOntologyName = probOntology;
}
/**
 * Writes a message only when verbose mode is enabled.
 *
 * @param msg the message to be written
 * @param log true to write to the logger, false to write to stdout
 */
protected void verbose(String msg, boolean log) {
    if (!verbose) {
        return;
    }
    if (log) {
        logger.info(msg);
    } else {
        System.out.println(msg);
    }
}
/**
 * Writes a message in verbose mode, on the channel selected by the
 * instance-wide {@code log} flag.
 *
 * @param msg the message to be written
 */
@Override
protected void verbose(String msg) {
    this.verbose(msg, this.log);
}
/**
 * Writes a message in standard mode.
 *
 * @param msg the message to be written
 * @param log true to write to the logger, false to write to stdout
 */
protected void output(String msg, boolean log) {
    if (!log) {
        System.out.println(msg);
    } else {
        logger.info(msg);
    }
}
/**
 * Writes a message in standard mode, on the channel selected by the
 * instance-wide {@code log} flag.
 *
 * @param msg the message to be written
 */
@Override
protected void output(String msg) {
    this.output(msg, this.log);
}
/**
 * Writes an error message.
 *
 * @param msg the message to be written
 * @param log true to write to the logger, false to write to stderr
 */
protected void error(String msg, boolean log) {
    if (!log) {
        System.err.println(msg);
    } else {
        logger.error(msg);
    }
}
/**
 * Writes an error message on the channel selected by the instance-wide
 * {@code log} flag, mirroring {@link #output(String)} and
 * {@link #verbose(String)}.
 *
 * @param msg the message to be written
 */
protected void error(String msg) {
    // FIX: the previous implementation emitted the message BOTH to the
    // logger and to stderr when log mode was enabled; route it to a single
    // channel like the sibling output(String)/verbose(String) methods.
    error(msg, log);
}
/**
 * Prints the final results of a query: number of explanations, the
 * probabilistic axioms used (verbose mode only), the probability of the
 * query and the total execution time.
 *
 * @param result the query result to print
 */
private void printResult(QueryResult result) {
    output("");
    output("============ Result ============");
    output("N. of Explanations: " + result.getNumberOfExplanations());
    if (findProbability) {
        verbose("Probabilistic axioms used:", log);
        // Iterate the null-safe view directly instead of calling safe() on
        // every loop step and then indexing the raw (possibly null) list.
        for (OWLAxiom ax : safe(result.getProbAxioms())) {
            verbose("\t" + BundleUtilities.getManchesterSyntaxString(ax) + " - prob: "
                    + pMap.get(ax), log);
        }
        output("");
        output("Probability of the query: " + result.getQueryProbability().getValue());
    }
    output("");
    output("Execution time (ms): " + timers.mainTimer.getTotal());
    output("");
    output("================================");
    output("");
}
/**
 * Enables or disables writing through the logger instead of the console.
 *
 * @param log the log flag to set
 */
public void setLog(boolean log) {
    this.log = log;
}
/**
 * Returns the accuracy used for probability computations.
 *
 * @return the accuracy
 */
public int getAccuracy() {
    return this.accuracy;
}
/**
 * Sets the accuracy used for probability computations.
 * Silently ignored once BUNDLE has been initialized.
 *
 * @param accuracy the accuracy to set
 */
public void setAccuracy(int accuracy) {
    if (this.initialized) {
        return;
    }
    this.accuracy = accuracy;
}
/**
 * Redirects the process-wide standard output stream.
 * NOTE: this affects the whole JVM, not only this Bundle instance.
 *
 * @param out a PrintStream for redirecting stdout
 */
public void setOut(PrintStream out) {
    System.setOut(out);
}
/**
 * Redirects the process-wide standard error stream.
 * NOTE: this affects the whole JVM, not only this Bundle instance.
 *
 * @param out a PrintStream for redirecting stderr
 */
public void setErr(PrintStream out) {
    System.setErr(out);
}
/**
 * Returns the configured BDD factory type.
 *
 * @return the BDD factory type
 */
public BDDFactoryType getBddFType() {
    return this.bddFType;
}
/**
 * Sets the BDD factory type. Silently ignored once BUNDLE has been
 * initialized.
 *
 * @param bddFType the BDD factory type to set
 */
public void setBddFType(BDDFactoryType bddFType) {
    if (this.initialized) {
        return;
    }
    this.bddFType = bddFType;
}
/**
 * Sets the BDD factory type from its (case-insensitive) name.
 * Ignored once BUNDLE has been initialized; an unknown name leaves the
 * current type unchanged and emits a warning instead of being silently
 * dropped.
 *
 * @param bddFTypeStr one of "buddy", "cudd", "cal", "j"/"java", "jdd", "u"
 */
public void setBddFType(String bddFTypeStr) {
    if (!this.initialized) {
        switch (bddFTypeStr.toLowerCase()) {
            case "buddy":
                bddFType = BDDFactoryType.BUDDY;
                break;
            case "cudd":
                bddFType = BDDFactoryType.CUDD;
                break;
            case "cal":
                bddFType = BDDFactoryType.CAL;
                break;
            case "j":
            case "java":
                bddFType = BDDFactoryType.J;
                break;
            case "jdd":
                bddFType = BDDFactoryType.JDD;
                break;
            case "u":
                bddFType = BDDFactoryType.U;
                break;
            default:
                // FIX: unknown names were previously ignored without notice.
                logger.warn("Unknown BDD factory type \"" + bddFTypeStr
                        + "\": keeping " + bddFType);
                break;
        }
    }
}
/**
 * Enables or disables the probability computation. Silently ignored once
 * BUNDLE has been initialized.
 *
 * @param findProbability the findProbability flag to set
 */
public void setFindProbability(boolean findProbability) {
    if (this.initialized) {
        return;
    }
    this.findProbability = findProbability;
}
/**
 * Releases the resources held by the BDD factory, if one was created.
 */
public void disposeBDDFactory() {
    // FIX: guard against a NullPointerException when the factory was
    // never initialized (setBddF() not yet called).
    if (bddF != null) {
        bddF.done();
    }
}
/**
 * Injects the timers collection used for performance bookkeeping.
 *
 * @param timers the timers to set
 */
public void setTimers(Timers timers) {
    this.timers = timers;
}
// public void flush() {
// reasoner.flush();
// }
/**
 * Enables or disables progress reporting during explanation generation.
 *
 * @param showAll the showAll flag to set
 */
public void setShowAll(boolean showAll) {
    this.showAll = showAll;
}
/**
 * Sets the root ontology to reason over. As side effects it also creates
 * the satisfiability converter (used by explainAxiom to turn an axiom
 * into an equivalent class-unsatisfiability test) and a fresh
 * non-buffering Pellet reasoner bound to the new ontology.
 *
 * @param rootOntology the ontology to reason over; must not be null, as
 *        it is dereferenced immediately to obtain its data factory
 */
@Override
public void setRootOntology(OWLOntology rootOntology) {
    this.rootOntology = rootOntology;
    converter = new SatisfiabilityConverter(this.rootOntology.getOWLOntologyManager().getOWLDataFactory());
    reasoner = new PelletReasonerFactory().createNonBufferingReasoner(this.rootOntology);
}
/**
 * Returns the root ontology BUNDLE reasons over.
 *
 * @return the root ontology
 */
@Override
public OWLOntology getRootOntology() {
    return this.rootOntology;
}
/**
 * Sets the maximum reasoning time from its string representation (same
 * format accepted by {@link #setMaxTime(String)}). Silently ignored once
 * BUNDLE has been initialized.
 *
 * @param reasoningTimeoutString the maximum time for execution, as a string
 */
@Override
public void setReasoningTimeout(String reasoningTimeoutString) {
    // FIX: the previous body was a self-assignment
    // (this.reasoningTimeout = reasoningTimeout) that ignored the
    // parameter entirely; parse it like setMaxTime does.
    if (!this.initialized) {
        this.reasoningTimeout = BundleUtilities.convertTimeValue(reasoningTimeoutString);
    }
}
/**
 * Returns the probability map (axiom -&gt; probability), or null if it has
 * not been built or injected yet.
 *
 * @return the probability map
 */
@Override
public Map<OWLAxiom, ApproxDouble> getpMap() {
    return pMap;
}
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy — site-footer artifact from page extraction, not part of the source.