/**
* Copyright (c) 2009 International Health Terminology Standards Development
* Organisation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Copyright CSIRO Australian e-Health Research Centre (http://aehrc.com).
* All rights reserved. Use is subject to license terms and conditions.
*/
package au.csiro.snorocket.core;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import au.csiro.ontology.Node;
import au.csiro.ontology.model.Axiom;
import au.csiro.ontology.model.Concept;
import au.csiro.ontology.model.ConceptInclusion;
import au.csiro.ontology.model.Literal;
import au.csiro.ontology.model.NamedFeature;
import au.csiro.ontology.model.NamedRole;
import au.csiro.ontology.model.Role;
import au.csiro.ontology.model.RoleInclusion;
import au.csiro.ontology.util.Statistics;
import au.csiro.snorocket.core.axioms.GCI;
import au.csiro.snorocket.core.axioms.IConjunctionQueueEntry;
import au.csiro.snorocket.core.axioms.IRoleQueueEntry;
import au.csiro.snorocket.core.axioms.Inclusion;
import au.csiro.snorocket.core.axioms.NF1a;
import au.csiro.snorocket.core.axioms.NF1b;
import au.csiro.snorocket.core.axioms.NF2;
import au.csiro.snorocket.core.axioms.NF3;
import au.csiro.snorocket.core.axioms.NF4;
import au.csiro.snorocket.core.axioms.NF5;
import au.csiro.snorocket.core.axioms.NF6;
import au.csiro.snorocket.core.axioms.NF7;
import au.csiro.snorocket.core.axioms.NF8;
import au.csiro.snorocket.core.axioms.NormalFormGCI;
import au.csiro.snorocket.core.axioms.RI;
import au.csiro.snorocket.core.concurrent.CR;
import au.csiro.snorocket.core.concurrent.Context;
import au.csiro.snorocket.core.concurrent.TaxonomyWorker1;
import au.csiro.snorocket.core.concurrent.TaxonomyWorker2;
import au.csiro.snorocket.core.concurrent.Worker;
import au.csiro.snorocket.core.model.AbstractConcept;
import au.csiro.snorocket.core.model.AbstractLiteral;
import au.csiro.snorocket.core.model.Conjunction;
import au.csiro.snorocket.core.model.Datatype;
import au.csiro.snorocket.core.model.DateLiteral;
import au.csiro.snorocket.core.model.DecimalLiteral;
import au.csiro.snorocket.core.model.Existential;
import au.csiro.snorocket.core.model.IntegerLiteral;
import au.csiro.snorocket.core.model.StringLiteral;
import au.csiro.snorocket.core.util.AxiomSet;
import au.csiro.snorocket.core.util.DenseConceptMap;
import au.csiro.snorocket.core.util.FastConceptHashSet;
import au.csiro.snorocket.core.util.FastConceptMap;
import au.csiro.snorocket.core.util.FeatureMap;
import au.csiro.snorocket.core.util.FeatureSet;
import au.csiro.snorocket.core.util.IConceptMap;
import au.csiro.snorocket.core.util.IConceptSet;
import au.csiro.snorocket.core.util.IMonotonicCollection;
import au.csiro.snorocket.core.util.IntIterator;
import au.csiro.snorocket.core.util.MonotonicCollection;
import au.csiro.snorocket.core.util.RoleSet;
import au.csiro.snorocket.core.util.SparseConceptHashSet;
import au.csiro.snorocket.core.util.SparseConceptMap;
import au.csiro.snorocket.core.util.SparseConceptSet;
/**
* A normalised EL Ontology
*
* @author law223
*
*/
public class NormalisedOntology implements Serializable {
/**
* Serialisation version.
*/
private static final long serialVersionUID = 1L;
/**
* Logger.
*/
private final static Logger log = LoggerFactory.getLogger(NormalisedOntology.class);
final protected IFactory factory;
/**
* The set of NF1 terms in the ontology
*
*
Concept map 76.5% full (SNOMED 20061230)
*
*
* These terms are of the form A n Ai [ B and are indexed by A.
*/
final protected IConceptMap> ontologyNF1;
/**
* The set of NF2 terms in the ontology
*
*
Concept map 34.7% full (SNOMED 20061230)
*
*
* These terms are of the form A [ r.B and are indexed by A.
*/
final protected IConceptMap> ontologyNF2;
/**
* The set of NF3 terms in the ontology
*
*
Concept map 9.3% full (SNOMED 20061230)
*
Unknown usage profile for Role maps
*
*
* These terms are of the form r.A [ b and indexed by A.
*/
final protected IConceptMap>> ontologyNF3;
/**
* The set of NF4 terms in the ontology
*/
final protected IMonotonicCollection ontologyNF4;
/**
* The set of NF5 terms in the ontology
*/
final protected IMonotonicCollection ontologyNF5;
/**
* The set of reflexive roles in the ontology
*/
final protected IConceptSet reflexiveRoles = new SparseConceptSet();
/**
* The set of NF7 terms in the ontology.
*
* These terms are of the form A [ f.(o, v) and are indexed by A.
*/
final protected IConceptMap> ontologyNF7;
/**
* The set of NF8 terms in the ontology.
*
* These terms are of the form f.(o, v) [ A. These are indexed by f.
*/
final protected FeatureMap> ontologyNF8;
/**
* The queue of contexts to process.
*/
private final Queue todo = new ConcurrentLinkedQueue();
/**
* The map of contexts by concept id.
*/
private final IConceptMap contextIndex;
/**
* The global role closure.
*/
private final Map roleClosureCache;
/**
* A set of new contexts added in an incremental classification.
*/
private final Set newContexts = new HashSet();
/**
* The number of threads to use.
*/
private int numThreads = Runtime.getRuntime().availableProcessors();
private boolean hasBeenIncrementallyClassified = false;
private transient Map conceptNodeIndex;
private static class ContextComparator implements Comparator, Serializable {
/**
* Serialisation version.
*/
private static final long serialVersionUID = 1L;
public int compare(Context o1, Context o2) {
return ((Integer) o1.getConcept()).compareTo(o2.getConcept());
}
}
/**
* A set of contexts potentially affected by an incremental classification.
*/
private final Set affectedContexts =
new ConcurrentSkipListSet(new ContextComparator());
/**
* Normalised axioms added incrementally.
*/
private AxiomSet as = new AxiomSet();
public IConceptMap> getOntologyNF1() {
return ontologyNF1;
}
public IConceptMap> getOntologyNF2() {
return ontologyNF2;
}
public IConceptMap>> getOntologyNF3() {
return ontologyNF3;
}
public IMonotonicCollection getOntologyNF4() {
return ontologyNF4;
}
public IMonotonicCollection getOntologyNF5() {
return ontologyNF5;
}
public IConceptSet getReflexiveRoles() {
return reflexiveRoles;
}
public IConceptMap> getOntologyNF7() {
return ontologyNF7;
}
public FeatureMap> getOntologyNF8() {
return ontologyNF8;
}
public Queue getTodo() {
return todo;
}
public IConceptMap getContextIndex() {
return contextIndex;
}
public Map getRoleClosureCache() {
return roleClosureCache;
}
public Set getAffectedContexts() {
return affectedContexts;
}
/**
*
* @param factory
* @param inclusions
*/
public NormalisedOntology(final IFactory factory, final Set inclusions) {
this(factory);
loadAxioms(inclusions);
if(log.isTraceEnabled()) {
printNormalisedAxioms();
}
}
final static int CONCEPT_COUNT_ESTIMATE = 500000;
/**
*
* @param baseConceptCount
* @param conceptCount
* if this value is too small, the algorithm performance will be
* impacted
* @param roleCount
*/
public NormalisedOntology(final IFactory factory) {
// TODO: how do we estimate these numbers better?
this(
factory,
new DenseConceptMap>(CONCEPT_COUNT_ESTIMATE),
new SparseConceptMap>(CONCEPT_COUNT_ESTIMATE, "ontologyNF2"),
new SparseConceptMap>>(CONCEPT_COUNT_ESTIMATE, "ontologyNF3"),
new MonotonicCollection(15), new MonotonicCollection(1),
new SparseConceptMap>(10, "ontologyNF7"),
new FeatureMap>(10)
);
}
/**
*
* @param factory
* @param nf1q
* @param nf2q
* @param nf3q
* @param nf4q
* @param nf5q
* @param nf7q
* @param nf8q
*/
protected NormalisedOntology(
final IFactory factory,
final IConceptMap> nf1q,
final IConceptMap> nf2q,
final IConceptMap>> nf3q,
final IMonotonicCollection nf4q,
final IMonotonicCollection nf5q,
final IConceptMap> nf7q,
final FeatureMap> nf8q) {
this.factory = factory;
contextIndex = new FastConceptMap(factory.getTotalConcepts(), "");
roleClosureCache = new ConcurrentHashMap(factory.getTotalRoles());
this.ontologyNF1 = nf1q;
this.ontologyNF2 = nf2q;
this.ontologyNF3 = nf3q;
this.ontologyNF4 = nf4q;
this.ontologyNF5 = nf5q;
this.ontologyNF7 = nf7q;
this.ontologyNF8 = nf8q;
}
/**
* Normalises and loads a set of axioms.
*
* @param inclusions
*/
public void loadAxioms(final Set inclusions) {
long start = System.currentTimeMillis();
if(log.isInfoEnabled())
log.info("Loading " + inclusions.size() + " axioms");
Set normInclusions = normalise(inclusions);
if(log.isInfoEnabled()) log.info("Processing " + normInclusions.size() + " normalised axioms");
Statistics.INSTANCE.setTime("normalisation", System.currentTimeMillis() - start);
start = System.currentTimeMillis();
for (Inclusion i : normInclusions) {
addTerm(i.getNormalForm());
}
Statistics.INSTANCE.setTime("indexing", System.currentTimeMillis() - start);
}
/**
* EXPERIMENTAL
*
* TODO deal with missing NF1bs axioms!
*
* TODO: inspect NF1bs and hierarchy to derive missing axioms!
*/
public void prapareForInferred() {
log.info("Adding additional axioms to calculate inferred axioms");
int key = 0;
int numNf3 = 0;
int numNf8 = 0;
for(final IntIterator itr = ontologyNF2.keyIterator(); itr.hasNext();) {
MonotonicCollection nf2s = ontologyNF2.get(itr.next());
for(final Iterator itr2 = nf2s.iterator(); itr2.hasNext();) {
NF2 nf2 = itr2.next();
// TODO remove
if(factory.lookupConceptId(nf2.lhsA).equals("287402001")) {
System.err.println(nf2);
key = nf2.rhsB;
}
// TODO remove
// The problem is likely to be in this method call!
if(!containsExistentialInNF3s(nf2.rhsR, nf2.rhsB, nf2.lhsA)) {
NF3 nnf = NF3.getInstance(nf2.rhsR, nf2.rhsB, factory.getConcept(new Existential(nf2.rhsR,
new au.csiro.snorocket.core.model.Concept(nf2.rhsB))));
//as.addAxiom(nnf); // Needed for incremental
addTerm(nnf);
numNf3++;
if(factory.lookupConceptId(nf2.lhsA).equals("287402001")) {
System.err.println("Added "+nnf);
}
}
}
}
System.out.println(key);
// TODO remove
for(NF2 nn : ontologyNF2.get(key)) {
System.err.println(nn);
}
// TODO remove
for(final IntIterator itr = ontologyNF7.keyIterator(); itr.hasNext();) {
MonotonicCollection nf7s = ontologyNF7.get(itr.next());
for(final Iterator itr2 = nf7s.iterator(); itr2.hasNext();) {
NF7 nf7 = itr2.next();
Datatype rhs = nf7.getD();
if(!containsDatatypeInNF8s(rhs)) {
NF8 nnf = NF8.getInstance(rhs, factory.getConcept(rhs));
//as.addAxiom(nnf); // Needed for incremental
addTerm(nnf);
numNf8++;
}
}
}
log.info("Added "+numNf3+" NF3 axioms and "+numNf8+" NF8 axioms.");
// FIXME: there seems to be an issue with incremental classification and these axioms. For now these will be
// excluded because there is no need for these for SNOMED CT and AMT.
/*
// try introducing a new axiom for each NF1b new concept [ A1 + A2 - add the conjunction object as key
for(NF1b nf1b : getNF1bs()) {
Object a1 = factory.lookupConceptId(nf1b.lhsA1());
Object a2 = factory.lookupConceptId(nf1b.lhsA2());
AbstractConcept ac1 = null;
AbstractConcept ac2 = null;
// We assume these are either Strings or Existentials
if(a1 instanceof String) {
ac1 = new au.csiro.snorocket.core.model.Concept(nf1b.lhsA1());
} else {
// This will throw a ClassCastException if an object outside of the internal model is found
ac1 = (AbstractConcept) a1;
}
if(a2 instanceof String) {
ac2 = new au.csiro.snorocket.core.model.Concept(nf1b.lhsA2());
} else {
// This will throw a ClassCastException if an object outside of the internal model is found
ac2 = (AbstractConcept) a2;
}
Conjunction con = new Conjunction(new AbstractConcept[] { ac1, ac2 });
int nid = factory.getConcept(con);
NF1a nf1a1 = NF1a.getInstance(nid, nf1b.lhsA1());
NF1a nf1a2 = NF1a.getInstance(nid, nf1b.lhsA2());
addTerm(nf1a1);
addTerm(nf1a2);
}
*/
}
public Collection getNF1bs() {
Collection res = new HashSet();
for(IntIterator it = ontologyNF1.keyIterator(); it.hasNext(); ) {
int a = it.next();
MonotonicCollection mc = ontologyNF1.get(a);
for(Iterator it2 = mc.iterator(); it2.hasNext(); ) {
IConjunctionQueueEntry entry = it2.next();
if(entry instanceof NF1b) {
res.add((NF1b) entry);
}
}
}
return res;
}
private boolean containsExistentialInNF3s(int r, int a, int b) {
ConcurrentMap> nf3Map = ontologyNF3.get(a);
if(nf3Map == null) return false;
Collection nf3s = nf3Map.get(r);
if(nf3s == null) return false;
for(IConjunctionQueueEntry nf3 : nf3s) {
if(b == nf3.getB()) return true;
}
return false;
}
private boolean containsDatatypeInNF8s(Datatype d) {
MonotonicCollection nf8s = ontologyNF8.get(d.getFeature());
for(final Iterator itr = nf8s.iterator(); itr.hasNext();) {
if(itr.next().lhsD.equals(d)) {
return true;
}
}
return false;
}
/**
* Transforms a {@link Set} of {@link AbstractAxiom}s into a {@link Set} of {@link Inclusion}s.
*
* @param axioms The axioms in the ontology model format.
* @return The axioms in the internal model format.
*/
private Set transformAxiom(final Set axioms) {
Set res = new HashSet();
for(Axiom aa : axioms) {
if(aa instanceof ConceptInclusion) {
ConceptInclusion ci = (ConceptInclusion)aa;
Concept lhs = ci.getLhs();
Concept rhs = ci.getRhs();
res.add(new GCI(transformConcept(lhs), transformConcept(rhs)));
} else if(aa instanceof RoleInclusion) {
RoleInclusion ri = (RoleInclusion)aa;
Role[] lh = ri.getLhs();
NamedRole[] lhs = new NamedRole[lh.length];
for(int i = 0; i < lh.length; i++) {
lhs[i] = (NamedRole) lh[i];
}
NamedRole rhs = (NamedRole) ri.getRhs();
int[] lhsInt = new int[lhs.length];
for(int i = 0; i < lhsInt.length; i++) {
lhsInt[i] = factory.getRole(lhs[i].getId());
}
res.add(new RI(lhsInt, factory.getRole(rhs.getId())));
}
}
return res;
}
/**
 * Transforms a {@link Concept} from the external ontology model into an
 * {@link au.csiro.snorocket.core.model.AbstractConcept} in the internal model.
 *
 * @param c The concept in the ontology model format.
 * @return The concept in the internal model format.
 * @throws RuntimeException if the concept is of an unsupported type.
 */
private au.csiro.snorocket.core.model.AbstractConcept transformConcept(Concept c) {
// TOP and BOTTOM map to the factory's fixed internal ids.
if(c.equals(au.csiro.ontology.model.NamedConcept.TOP_CONCEPT)) {
return new au.csiro.snorocket.core.model.Concept(IFactory.TOP_CONCEPT);
} else if(c.equals(au.csiro.ontology.model.NamedConcept.BOTTOM_CONCEPT)) {
return new au.csiro.snorocket.core.model.Concept(IFactory.BOTTOM_CONCEPT);
} else if(c instanceof au.csiro.ontology.model.NamedConcept) {
// Named concepts are interned via the factory to get their int id.
return new au.csiro.snorocket.core.model.Concept(
factory.getConcept(((au.csiro.ontology.model.NamedConcept) c).getId()));
} else if(c instanceof au.csiro.ontology.model.Conjunction) {
// Recursively transform each conjunct.
Concept[] modelCons = ((au.csiro.ontology.model.Conjunction)c).getConcepts();
au.csiro.snorocket.core.model.AbstractConcept[] cons =
new au.csiro.snorocket.core.model.AbstractConcept[modelCons.length];
for(int i = 0; i < modelCons.length; i++) {
cons[i] = transformConcept(modelCons[i]);
}
return new Conjunction(cons);
} else if(c instanceof au.csiro.ontology.model.Datatype) {
// Only named features are supported; a non-named Feature would fail the cast.
au.csiro.ontology.model.Datatype dt = (au.csiro.ontology.model.Datatype) c;
return new Datatype(factory.getFeature(((NamedFeature) dt.getFeature()).getId()), dt.getOperator(),
transformLiteral(dt.getLiteral()));
} else if(c instanceof au.csiro.ontology.model.Existential) {
// Only named roles are supported; recursively transform the filler.
au.csiro.ontology.model.Existential e = (au.csiro.ontology.model.Existential) c;
return new Existential(factory.getRole(((NamedRole) e.getRole()).getId()),
transformConcept(e.getConcept()));
} else {
throw new RuntimeException("Unexpected AbstractConcept "+c.getClass().getName());
}
}
/**
 * Transforms a {@link Literal} from the external ontology model into an
 * {@link au.csiro.snorocket.core.model.AbstractLiteral} in the internal model.
 *
 * @param l The literal in the ontology model format.
 * @return The literal in the internal model format.
 * @throws RuntimeException if the literal is of an unsupported type.
 */
private au.csiro.snorocket.core.model.AbstractLiteral transformLiteral(Literal l) {
if(l instanceof au.csiro.ontology.model.DateLiteral) {
return new DateLiteral(((au.csiro.ontology.model.DateLiteral) l).getValue());
} else if(l instanceof au.csiro.ontology.model.DecimalLiteral) {
return new DecimalLiteral(((au.csiro.ontology.model.DecimalLiteral) l).getValue());
} else if(l instanceof au.csiro.ontology.model.IntegerLiteral) {
return new IntegerLiteral(((au.csiro.ontology.model.IntegerLiteral) l).getValue());
} else if(l instanceof au.csiro.ontology.model.StringLiteral) {
return new StringLiteral(((au.csiro.ontology.model.StringLiteral) l).getValue());
} else if(l instanceof au.csiro.ontology.model.FloatLiteral) {
// Floats are widened to decimals. NOTE(review): the BigDecimal(double)
// constructor expands the binary fraction exactly (e.g. 0.1f does not
// become 0.1) — confirm this matches the literal-equality semantics
// expected elsewhere in the classifier.
return new DecimalLiteral(new BigDecimal(((au.csiro.ontology.model.FloatLiteral) l).getValue()));
} else {
throw new RuntimeException("Unexpected AbstractLiteral "+l.getClass().getName());
}
}
/**
* Returns a set of Inclusions in normal form suitable for classifying.
*/
public Set normalise(final Set inclusions) {
// Exhaustively apply NF1 to NF4
Set newIs = transformAxiom(inclusions);
Set oldIs = new HashSet(newIs.size());
final Set done = new HashSet(newIs.size());
do {
final Set tmp = oldIs;
oldIs = newIs;
newIs = tmp;
newIs.clear();
for (Inclusion i : oldIs) {
Inclusion[] s = i.normalise1(factory);
if (null != s) {
for (int j = 0; j < s.length; j++) {
if (null != s[j]) {
newIs.add(s[j]);
}
}
} else {
done.add(i);
}
}
} while (!newIs.isEmpty());
newIs.addAll(done);
done.clear();
// Then exhaustively apply NF5 to NF7
do {
final Set tmp = oldIs;
oldIs = newIs;
newIs = tmp;
newIs.clear();
for (Inclusion i : oldIs) {
Inclusion[] s = i.normalise2(factory);
if (null != s) {
for (int j = 0; j < s.length; j++) {
if (null != s[j]) {
newIs.add(s[j]);
}
}
} else {
done.add(i);
}
}
} while (!newIs.isEmpty());
if(log.isTraceEnabled()) {
log.trace("Normalised axioms:");
for(Inclusion inc : done) {
StringBuilder sb = new StringBuilder();
if(inc instanceof GCI) {
GCI gci = (GCI)inc;
sb.append(printInternalObject(gci.lhs()));
sb.append(" [ ");
sb.append(printInternalObject(gci.rhs()));
} else if(inc instanceof RI) {
RI ri = (RI)inc;
int[] lhs = ri.getLhs();
sb.append(factory.lookupRoleId(lhs[0]));
for(int i = 1; i < lhs.length; i++) {
sb.append(" * ");
sb.append(factory.lookupRoleId(lhs[i]));
}
sb.append(" [ ");
sb.append(factory.lookupRoleId(ri.getRhs()));
}
log.trace(sb.toString());
}
}
return done;
}
/**
 * Prints an object of the internal model using the string representation
 * of the corresponding object in the external model.
 *
 * @param o an internal-model object (Conjunction, Existential, Datatype,
 *          Concept, or any Comparable used as an id)
 * @return a human-readable rendering of the object
 * @throws RuntimeException if the object is of an unsupported type.
 */
private String printInternalObject(Object o) {
if(o instanceof Conjunction) {
// Render conjuncts joined by " + ".
Conjunction con = (Conjunction)o;
StringBuilder sb = new StringBuilder();
AbstractConcept[] cons = con.getConcepts();
if(cons != null && cons.length > 0) {
sb.append(printInternalObject(cons[0]));
for(int i = 1; i < cons.length; i++) {
sb.append(" + ");
sb.append(printInternalObject(cons[i]));
}
}
return sb.toString();
} else if(o instanceof Existential) {
// Render as role.filler using the external role id.
Existential e = (Existential)o;
AbstractConcept c = e.getConcept();
int role = e.getRole();
return factory.lookupRoleId(role)+"."+printInternalObject(c);
} else if(o instanceof Datatype) {
// Render as feature.(literal).
StringBuilder sb = new StringBuilder();
Datatype d = (Datatype)o;
String feature = factory.lookupFeatureId(d.getFeature());
sb.append(feature.toString());
sb.append(".(");
AbstractLiteral literal = d.getLiteral();
sb.append(literal);
sb.append(")");
return sb.toString();
} else if(o instanceof Concept) {
// NOTE(review): this relies on the internal Concept's hashCode() being its
// factory id — confirm against au.csiro.snorocket.core.model.Concept. Also
// note the unqualified "Concept" here resolves to the imported external
// model class, which may not be the intended internal type.
Object obj = factory.lookupConceptId(((Concept)o).hashCode());
if(obj == au.csiro.ontology.model.NamedConcept.TOP) {
return "TOP";
} else if(obj == au.csiro.ontology.model.NamedConcept.BOTTOM) {
return "BOTTOM";
} else if(obj instanceof AbstractConcept) {
// Virtual (unnamed) concepts are rendered recursively in angle brackets.
return "<"+printInternalObject(obj)+">";
} else {
return obj.toString();
}
} else if(o instanceof Comparable) {
// Plain ids (e.g. Strings or Integers) print as themselves.
return o.toString();
} else {
throw new RuntimeException("Unexpected object with class "+
o.getClass().getName());
}
}
/**
 * Adds a normalised term to the ontology, dispatching on its concrete normal
 * form: NF1a/NF1b by lhs concept, NF2 by lhs concept, NF3 by concept and role,
 * NF4/NF5 appended to their collections, NF6 recorded as a reflexive role,
 * NF7 by lhs concept, and NF8 by feature.
 *
 * @param term
 *            The normalised term.
 * @throws IllegalArgumentException if the term is not one of NF1 through NF8.
 */
protected void addTerm(NormalFormGCI term) {
if (term instanceof NF1a) {
final NF1a nf1 = (NF1a) term;
final int a = nf1.lhsA();
addTerms(ontologyNF1, a, nf1.getQueueEntry());
} else if (term instanceof NF1b) {
// NF1b has two lhs concepts; index its queue entries under both.
final NF1b nf1 = (NF1b) term;
final int a1 = nf1.lhsA1();
final int a2 = nf1.lhsA2();
addTerms(ontologyNF1, a1, nf1.getQueueEntry1());
addTerms(ontologyNF1, a2, nf1.getQueueEntry2());
} else if (term instanceof NF2) {
final NF2 nf2 = (NF2) term;
addTerms(ontologyNF2, nf2);
} else if (term instanceof NF3) {
final NF3 nf3 = (NF3) term;
addTerms(ontologyNF3, nf3);
} else if (term instanceof NF4) {
ontologyNF4.add((NF4) term);
} else if (term instanceof NF5) {
ontologyNF5.add((NF5) term);
} else if (term instanceof NF6) {
// NF6 declares a role reflexive; only the role id needs recording.
reflexiveRoles.add(((NF6) term).getR());
} else if (term instanceof NF7) {
final NF7 nf7 = (NF7) term;
addTerms(ontologyNF7, nf7);
} else if (term instanceof NF8) {
final NF8 nf8 = (NF8) term;
addTerms(ontologyNF8, nf8);
} else {
throw new IllegalArgumentException("Type of " + term
+ " must be one of NF1 through NF8");
}
}
/**
*
* @param entries
* @param a
* @param queueEntry
*/
protected void addTerms(final IConceptMap> entries, final int a,
final IConjunctionQueueEntry queueEntry) {
MonotonicCollection queueA = entries.get(a);
if (null == queueA) {
queueA = new MonotonicCollection(2);
entries.put(a, queueA);
}
queueA.add(queueEntry);
}
/**
*
* @param entries
* @param nf2
*/
protected void addTerms(
final IConceptMap> entries, final NF2 nf2) {
MonotonicCollection set = entries.get(nf2.lhsA);
if (null == set) {
set = new MonotonicCollection(2);
entries.put(nf2.lhsA, set);
}
set.add(nf2);
}
/**
*
* @param queue
* @param nf3
*/
protected void addTerms(final IConceptMap>> queue,
final NF3 nf3) {
ConcurrentMap> map = queue.get(nf3.lhsA);
Collection entry;
if (null == map) {
map = new ConcurrentHashMap>(
factory.getTotalRoles());
queue.put(nf3.lhsA, map);
entry = null;
} else {
entry = map.get(nf3.lhsR);
}
if (null == entry) {
entry = new HashSet();
entry.add(nf3.getQueueEntry());
map.put(nf3.lhsR, entry);
} else {
entry.add(nf3.getQueueEntry());
}
}
protected void addTerms(final IConceptMap> entries, final NF7 nf7) {
MonotonicCollection set = entries.get(nf7.lhsA);
if (null == set) {
set = new MonotonicCollection(2);
entries.put(nf7.lhsA, set);
}
set.add(nf7);
}
/**
*
* @param entries
* @param nf8
*/
protected void addTerms(final FeatureMap> entries, final NF8 nf8) {
MonotonicCollection set = entries.get(nf8.lhsD.getFeature());
if (null == set) {
set = new MonotonicCollection(2);
entries.put(nf8.lhsD.getFeature(), set);
}
set.add(nf8);
}
/**
*
* @param incAxioms
*/
public void loadIncremental(Set incAxioms) {
// Normalise
Set norm = normalise(incAxioms);
for(Inclusion inc : norm) {
NormalFormGCI nf = inc.getNormalForm();
as.addAxiom(nf);
addTerm(nf);
}
}
/**
* Runs an incremental classification.
*
* @return
*/
public void classifyIncremental() {
if(as.isEmpty()) return;
// Clear any state from previous incremental classifications
newContexts.clear();
affectedContexts.clear();
int numNewConcepts = 0;
// Determine which contexts are affected
for (NF1a i : as.getNf1aAxioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF1b i : as.getNf1bAxioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF2 i : as.getNf2Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF3 i : as.getNf3Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF4 i : as.getNf4Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF5 i : as.getNf5Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF6 i : as.getNf6Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF7 i : as.getNf7Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
for (NF8 i : as.getNf8Axioms()) {
numNewConcepts = processInclusion(numNewConcepts, i);
}
if(log.isInfoEnabled()) log.info("Added " + numNewConcepts + " new concepts to the ontology");
// TODO: this is potentially slow
IConceptMap subsumptions = getSubsumptions();
rePrimeNF1(as, subsumptions);
rePrimeNF2(as, subsumptions);
rePrimeNF3(as, subsumptions);
rePrimeNF4(as, subsumptions);
rePrimeNF5(as, subsumptions);
rePrimeNF6(as, subsumptions);
rePrimeNF7(as, subsumptions);
rePrimeNF8(as, subsumptions);
// Classify
if(log.isInfoEnabled())
log.info("Classifying incrementally with " + numThreads + " threads");
if(log.isInfoEnabled())
log.info("Running saturation");
ExecutorService executor = Executors.newFixedThreadPool(numThreads);
for (int j = 0; j < numThreads; j++) {
Runnable worker = new Worker(todo);
executor.execute(worker);
}
executor.shutdown();
while (!executor.isTerminated()) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
assert (todo.isEmpty());
// Stop tracking changes in reactivated contexts
for (Context ctx : affectedContexts) {
ctx.endTracking();
}
affectedContexts.removeAll(newContexts);
hasBeenIncrementallyClassified = true;
as.clear();
if(log.isTraceEnabled())
log.trace("Processed " + contextIndex.size() + " contexts");
}
protected int processInclusion(int numNewConcepts, NormalFormGCI nf) {
// Add a context to the context index for every new concept in the axioms being added incrementally
int[] cids = nf.getConceptsInAxiom();
for (int j = 0; j < cids.length; j++) {
int cid = cids[j];
if (!contextIndex.containsKey(cid)) {
Context c = new Context(cid, this);
contextIndex.put(cid, c);
if (c.activate()) {
todo.add(c);
}
if (log.isTraceEnabled()) {
log.trace("Added context " + cid);
}
// Keep track of the newly added contexts
newContexts.add(c);
numNewConcepts++;
}
}
return numNewConcepts;
}
/**
* Processes the axioms in normal form 1 from a set of axioms added
* incrementally and does the following:
*
*
Adds the axioms to the local map.
*
Calculates the new query entries derived from the addition of these
* axioms.
*
Adds query entries to corresponding contexts and activates them.