// NOTE(review): the following artifact metadata was scraped from a Maven
// repository web page and is not part of the original source file.
// Artifact: com.clarkparsia.pellet.owlapiv3.PelletLoader (newer version available: 2.3.6-ansell)
// Portions Copyright (c) 2006 - 2008, Clark & Parsia, LLC. 
// Clark & Parsia, LLC parts of this source code are available under the terms of the Affero General Public License v3.
//
// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
// Questions, comments, or requests for clarification: [email protected]
//
// ---
// Portions Copyright (c) 2003 Ron Alford, Mike Grove, Bijan Parsia, Evren Sirin
// Alford, Grove, Parsia, Sirin parts of this source code are available under the terms of the MIT License.
//
// The MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.

package com.clarkparsia.pellet.owlapiv3;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.mindswap.pellet.KnowledgeBase;
import org.mindswap.pellet.exceptions.InternalReasonerException;
import org.mindswap.pellet.utils.Timer;
import org.semanticweb.owlapi.model.AddAxiom;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.AddOntologyAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLException;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.OWLOntologyChangeVisitor;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.RemoveAxiom;
import org.semanticweb.owlapi.model.RemoveImport;
import org.semanticweb.owlapi.model.RemoveOntologyAnnotation;
import org.semanticweb.owlapi.model.SetOntologyID;

import aterm.ATermAppl;

/**
 * 

Title:

* *

Description:

* *

Copyright: Copyright (c) 2007

* *

Company: Clark & Parsia, LLC.

* * @author Evren Sirin */ public class PelletLoader { public static Logger log = Logger .getLogger( PelletLoader.class.getName() ); private KnowledgeBase kb; // private Set loadedFiles; private OWLOntologyManager manager; private Set ontologies; /** * Flag to check if imports will be automatically loaded/unloaded */ private boolean processImports; /** * Ontologies that are loaded due to imports but they have not been included * in an explicit load statement by the user */ private Set notImported; /** * This is the reverse mapping of imports. The key is an ontology and the * value is a set of ontology that imports the ontology used as the key */ private Map> importDependencies; private PelletVisitor visitor; private ChangeVisitor changeVisitor = new ChangeVisitor(); private class ChangeVisitor implements OWLOntologyChangeVisitor { private boolean reloadRequired; public boolean isReloadRequired() { return reloadRequired; } /** * Process a change, providing a single call for common * reset,accept,isReloadRequired pattern. 
* * @param change * the {@link OWLOntologyChange} to process * @return true if change is handled, false if * a reload is required */ public boolean process(OWLOntologyChange change) { this.reset(); change.accept( this ); return !isReloadRequired(); } public void reset() { visitor.reset(); reloadRequired = false; } public void visit(AddAxiom change) { visitor.setAddAxiom( true ); change.getAxiom().accept( visitor ); reloadRequired = visitor.isReloadRequired(); } public void visit(RemoveAxiom change) { visitor.setAddAxiom( false ); change.getAxiom().accept( visitor ); reloadRequired = visitor.isReloadRequired(); } public void visit(AddImport change) { reloadRequired = getProcessImports(); } public void visit(AddOntologyAnnotation change) { // TODO Auto-generated method stub } public void visit(RemoveImport change) { reloadRequired = getProcessImports(); } public void visit(RemoveOntologyAnnotation change) { // TODO Auto-generated method stub } public void visit(SetOntologyID change) { // nothing to do here } } public PelletLoader(KnowledgeBase kb) { this.kb = kb; visitor = new PelletVisitor( kb ); processImports = true; ontologies = new HashSet(); notImported = new HashSet(); importDependencies = new HashMap>(); } /** * @deprecated Use {@link #getProcessImports()} instead */ public boolean loadImports() { return getProcessImports(); } /** * @deprecated Use {@link #setProcessImports(boolean)} instead */ public void setLoadImports(boolean loadImports) { setProcessImports( loadImports ); } public boolean getProcessImports() { return processImports; } public void setProcessImports(boolean processImports) { this.processImports = processImports; } public void clear() { visitor.clear(); kb.clear(); ontologies.clear(); notImported.clear(); importDependencies.clear(); // loadedFiles = new HashSet(); // loadedFiles.add( Namespaces.OWL ); // loadedFiles.add( Namespaces.RDF ); // loadedFiles.add( Namespaces.RDFS ); } public KnowledgeBase getKB() { return kb; } public void 
setKB(KnowledgeBase kb) { this.kb = kb; } public ATermAppl term(OWLObject d) { visitor.reset(); visitor.setAddAxiom( false ); d.accept( visitor ); ATermAppl a = visitor.result(); if( a == null ) throw new InternalReasonerException( "Cannot create ATerm from description " + d ); return a; } public void reload() { log.fine( "Reloading the ontologies" ); // copy the loaded ontologies Set notImportedOnts = new HashSet( notImported ); // clear everything clear(); // load ontologies again load( notImportedOnts ); } public void load(Set ontologies) { Timer timer = kb.timers.startTimer( "load" ); int axiomCount = 0; Collection toBeLoaded = new LinkedHashSet(); for( OWLOntology ontology : ontologies ) axiomCount += load( ontology, false, toBeLoaded ); visitor.reset(); visitor.setAddAxiom( true ); for( OWLOntology ontology : toBeLoaded ) ontology.accept( visitor ); visitor.verify(); timer.stop(); } private int load(OWLOntology ontology, boolean imported, Collection toBeLoaded) { // if not imported add it to notImported set if( !imported ) notImported.add( ontology ); // add to the loaded ontologies boolean added = ontologies.add( ontology ); // if it was already there, nothing more to do if( !added ) return 0; int axiomCount = ontology.getAxioms().size(); toBeLoaded.add( ontology ); // if processing imports load the imported ontologies if( processImports ) { for( OWLOntology importedOnt : ontology.getImports() ) { // load the importedOnt axiomCount += load( importedOnt, true, toBeLoaded ); // update the import dependencies Set importees = importDependencies.get( importedOnt ); if( importees == null ) { importees = new HashSet(); importDependencies.put( importedOnt, importees ); } importees.add( ontology ); } } return axiomCount; } public Set getUnsupportedAxioms() { return visitor.getUnsupportedAxioms(); } public void unload(Set ontologies) { for( OWLOntology ontology : ontologies ) unload( ontology ); } private void unload(OWLOntology ontology) { // remove the ontology from 
the set boolean removed = ontologies.remove( ontology ); // if it is not there silently return if( !removed ) return; // remove it from notImported set, too notImported.remove( ontology ); // if we are processing imports we might need to unload the // imported ontologies if( processImports ) { // go over the imports for( OWLOntology importOnt : ontology.getImports() ) { // see if the importedOnt is imported by any other ontology Set importees = importDependencies.get( importOnt ); if( importees != null ) { // remove the unloaded ontology from the dependencies importees.remove( ontology ); // if nothing is left if( importees.isEmpty() ) { // remove the empty set from dependencies importDependencies.remove( importOnt ); // only unload if this ontology was not loaded by the // user explicitly if( !notImported.contains( importOnt ) ) unload( importOnt ); } } } } } /** * @return Returns the loaded ontologies. */ public Set getOntologies() { return Collections.unmodifiableSet( ontologies ); } public OWLOntologyManager getManager() { return manager; } public void setManager(OWLOntologyManager manager) { this.manager = manager; } /** * Apply the given changes to the Pellet KB. * * @param changes * List of ontology changes to be applied * @return true if changes applied successfully, * false otherwise indicating a reload is required * @throws OWLException */ public boolean applyChanges(List changes) { for( OWLOntologyChange change : changes ) { if( !ontologies.contains( change.getOntology() ) ) continue; if( !changeVisitor.process( change ) ) { if( log.isLoggable( Level.FINE ) ) log.fine( "Reload required by ontology change " + change ); return false; } } return true; } }




// NOTE(review): trailing web-page footer from the scrape, not part of the source:
// © 2015 - 2024 Weber Informatics LLC | Privacy Policy