// Artifact: org.integratedmodelling.engine.modelling.runtime.Scale (Maven / Gradle / Ivy, newest version)
/*******************************************************************************
* Copyright (C) 2007, 2015:
*
* - Ferdinando Villa
* - integratedmodelling.org
* - any other authors listed in @author annotations
*
* All rights reserved. This file is part of the k.LAB software suite,
* meant to enable modular, collaborative, integrated
* development of interoperable data and model components. For
* details, see http://integratedmodelling.org.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Affero General Public License
* Version 3 or any later version.
*
* This program is distributed in the hope that it will be useful,
* but without any warranty; without even the implied warranty of
* merchantability or fitness for a particular purpose. See the
* Affero General Public License for more details.
*
* You should have received a copy of the Affero General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
* The license is also available at: https://www.gnu.org/licenses/agpl.html
*******************************************************************************/
package org.integratedmodelling.engine.modelling.runtime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import org.integratedmodelling.api.knowledge.IConcept;
import org.integratedmodelling.api.modelling.IExtent;
import org.integratedmodelling.api.modelling.IModelBean;
import org.integratedmodelling.api.modelling.IScale;
import org.integratedmodelling.api.modelling.ITopologicallyComparable;
import org.integratedmodelling.api.space.ISpatialExtent;
import org.integratedmodelling.api.time.ITemporalExtent;
import org.integratedmodelling.collections.MultidimensionalCursor;
import org.integratedmodelling.collections.MultidimensionalCursor.StorageOrdering;
import org.integratedmodelling.common.configuration.KLAB;
import org.integratedmodelling.common.interfaces.NetworkDeserializable;
import org.integratedmodelling.common.model.runtime.AbstractScale;
import org.integratedmodelling.engine.geospace.Geospace;
import org.integratedmodelling.engine.geospace.extents.SpaceExtent;
import org.integratedmodelling.engine.time.Time;
import org.integratedmodelling.engine.time.extents.RegularTemporalGrid;
import org.integratedmodelling.exceptions.KlabException;
import org.integratedmodelling.exceptions.KlabRuntimeException;
import org.integratedmodelling.exceptions.KlabValidationException;
import org.integratedmodelling.lang.LogicalConnector;
public class Scale extends AbstractScale implements IScale, NetworkDeserializable {

    // originalCursor != null means we derive from a previous scale and are
    // representing one slice of it...
    private MultidimensionalCursor originalCursor = null;
    // ... identified by this offset...
    private int sliceOffset = -1;
    // ... along this dimension
    private int sliceDimension = -1;

    /**
     * Create a new scale from zero or more extents. Extents sharing a domain
     * concept are merged together; the result is kept sorted.
     *
     * @param topologies the extents composing the scale
     */
    public Scale(IExtent... topologies) {
        for (IExtent e : topologies) {
            mergeExtent(e, true);
        }
        sort();
    }

    /**
     * Create a new scale from a collection of extents.
     *
     * @param topologies the extents composing the scale
     */
    public Scale(Collection topologies) {
        this(topologies.toArray(new IExtent[topologies.size()]));
    }

    /*
     * Build a "slice" of a previous scale: the passed extents omit the sliced
     * dimension, whose index and fixed offset within the original cursor are
     * recorded so that getOriginalOffset() can map our linear offsets back to
     * offsets in the original scale.
     */
    private Scale(IExtent[] topologies, MultidimensionalCursor cursor, int sliceExtentIndex,
            int sliceExtentOffset) throws KlabException {
        originalCursor = cursor;
        sliceDimension = sliceExtentIndex;
        sliceOffset = sliceExtentOffset;
        for (IExtent e : topologies) {
            mergeExtent(e, true);
        }
    }

    /**
     * Empty scale; extents can be added later through
     * {@link #mergeExtent(IExtent, boolean)} or {@link #deserialize(IModelBean)}.
     */
    public Scale() {
    }

    @Override
    public IExtent getExtent(int index) {
        return extents.get(index);
    }

    @Override
    public Iterator iterator() {
        return extents.iterator();
    }

    @Override
    public long getMultiplicity() {
        return multiplicity;
    }

    /**
     * True if every one of our extents contains the corresponding extent of the
     * passed scale. Scales with different sets of domain concepts never contain
     * each other.
     */
    @Override
    public boolean contains(IScale scale) throws KlabException {
        if (!hasSameExtents(scale)) {
            return false;
        }
        for (IExtent e : extents) {
            if (!e.contains(((Scale) scale).getExtent(e.getDomainConcept()))) {
                return false;
            }
        }
        return true;
    }

    /**
     * True if every one of our extents overlaps the corresponding extent of the
     * passed scale. Scales with different sets of domain concepts never overlap.
     */
    @Override
    public boolean overlaps(IScale scale) throws KlabException {
        if (!hasSameExtents(scale)) {
            return false;
        }
        for (IExtent e : extents) {
            if (!e.overlaps(((Scale) scale).getExtent(e.getDomainConcept()))) {
                return false;
            }
        }
        return true;
    }

    /**
     * True if every one of our extents intersects the corresponding extent of the
     * passed scale. Scales with different sets of domain concepts never intersect.
     */
    @Override
    public boolean intersects(IScale scale) throws KlabException {
        if (!hasSameExtents(scale)) {
            return false;
        }
        for (IExtent e : extents) {
            if (!e.intersects(((Scale) scale).getExtent(e.getDomainConcept()))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Extent-by-extent intersection with another scale, or null if the passed
     * scale does not span the same domain concepts as ours.
     */
    @Override
    public IScale intersection(IScale scale) throws KlabException {
        if (!hasSameExtents(scale)) {
            return null;
        }
        Scale ret = new Scale();
        for (IExtent e : extents) {
            ret.mergeExtent(e.intersection(((Scale) scale).getExtent(e.getDomainConcept())), false);
        }
        return ret;
    }

    /**
     * Extent-by-extent union with another scale, or null if the passed scale does
     * not span the same domain concepts as ours.
     */
    @Override
    public IScale union(IScale scale) throws KlabException {
        if (!hasSameExtents(scale)) {
            return null;
        }
        Scale ret = new Scale();
        for (IExtent e : extents) {
            ret.mergeExtent(e.union(((Scale) scale).getExtent(e.getDomainConcept())), false);
        }
        return ret;
    }

    /**
     * Merge the passed extent into this scale. If an extent with the same domain
     * concept is already present, it is replaced by the result of merging the two;
     * otherwise the (sanitized) extent is added. Extents are re-sorted afterwards.
     *
     * @param extent the extent to merge in
     * @param force  passed through to {@link IExtent} merging for a matching domain
     */
    public void mergeExtent(IExtent extent, boolean force) {

        IExtent merged = null;
        int i = 0;
        for (IExtent e : extents) {
            if (e.getDomainConcept().equals(extent.getDomainConcept())) {
                try {
                    merged = e.merge(extent, force);
                } catch (KlabException e1) {
                    throw new KlabRuntimeException(e1);
                }
                break;
            }
            i++;
        }

        if (merged != null) {
            // FIX: replace the previous extent for this domain. The original code
            // used extents.add(i, merged), which inserts and leaves the stale
            // extent in place, producing two extents with the same domain concept.
            extents.set(i, merged);
        } else {
            try {
                extents.add(KLAB.MFACTORY.sanitizeExtent(extent));
            } catch (KlabException e1) {
                throw new KlabRuntimeException(e1);
            }
        }
        sort();
    }

    /**
     * Return a collection of scales with multiplicity 1, one per each combination
     * of the extent states we represent.
     *
     * @return disaggregated scales
     * @throws KlabException
     */
    public Collection disaggregate() throws KlabException {

        ArrayList ret = new ArrayList();

        // one dimension per extent, sized by its multiplicity
        int[] dims = new int[extents.size()];
        for (int i = 0; i < dims.length; i++) {
            dims[i] = (int) extents.get(i).getMultiplicity();
        }

        MultidimensionalCursor cursor = new MultidimensionalCursor();
        cursor.defineDimensions(dims);

        // enumerate every combination of per-extent states
        for (int i = 0; i < cursor.getMultiplicity(); i++) {
            IExtent[] exts = new IExtent[dims.length];
            int[] idx = cursor.getElementIndexes(i);
            for (int j = 0; j < exts.length; j++) {
                exts[j] = extents.get(j).getExtent(idx[j]);
            }
            ret.add(new Scale(exts));
        }

        return ret;
    }

    /*
     * quick access to "current" T state index for given offset - not in the API for now.
     * Returns -1 when the scale has no temporal extent.
     */
    public int getTimeIndex(int globalIndex) {
        return tIndex == -1 ? -1 : cursor.getElementIndexes(globalIndex)[tIndex];
    }

    /*
     * quick access to "current" S state index for given offset - not in the API for now.
     * Returns -1 when the scale has no spatial extent.
     */
    public int getSpaceIndex(int globalIndex) {
        return sIndex == -1 ? -1 : cursor.getElementIndexes(globalIndex)[sIndex];
    }

    /*
     * quick access to "current" arbitrary state index for given offset - not in the API for now.
     */
    @Override
    public int[] getExtentIndex(int globalIndex) {
        return cursor.getElementIndexes(globalIndex);
    }

    /*
     * true if the passed scale has the same set of extent domain concepts as we do
     * (checked in both directions; the actual extents may differ).
     */
    boolean hasSameExtents(IScale scale) {
        for (IExtent e : scale) {
            if (getExtent(e.getDomainConcept()) == null) {
                return false;
            }
        }
        for (IExtent e : extents) {
            if (((Scale) scale).getExtent(e.getDomainConcept()) == null) {
                return false;
            }
        }
        return true;
    }

    /*
     * get the extent with the passed domain concept, or null if we have none.
     */
    @Override
    public IExtent getExtent(IConcept domainConcept) {
        for (IExtent e : extents) {
            if (e.getDomainConcept().equals(domainConcept)) {
                return e;
            }
        }
        return null;
    }

    public List getExtents() {
        return extents;
    }

    /**
     * Return the proportion of coverage of the extent that is covered the least by
     * the corresponding extent in the passed scale.
     *
     * @param context
     * @return coverage
     */
    public double getCoverage(IScale context) {
        // TODO placeholder: full coverage is assumed until implemented
        return 1.0;
    }

    /**
     * Return the proportion of coverage that the passed scale would add to the
     * coverage of our own extents.
     *
     * @param mcov
     * @return additional coverage
     */
    public double getAdditionalCoverage(Scale mcov) {
        // TODO placeholder: full additional coverage is assumed until implemented
        return 1.0;
    }

    @Override
    public ITopologicallyComparable union(ITopologicallyComparable other)
            throws KlabException {
        if (!(other instanceof Scale)) {
            // FIX: message previously said "intersected" in the union method
            throw new KlabValidationException(other + " unioned with a Scale");
        }
        return merge((Scale) other, LogicalConnector.UNION, true);
    }

    @Override
    public ITopologicallyComparable intersection(ITopologicallyComparable other)
            throws KlabException {
        if (!(other instanceof Scale)) {
            throw new KlabValidationException(other + " intersected with a Scale");
        }
        return merge((Scale) other, LogicalConnector.INTERSECTION, true);
    }

    @Override
    public double getCoveredExtent() {
        /*
         * TODO multiply extents of extents.
         */
        return 1;
    }

    /**
     * Merge with another scale using the passed operator. Extents we have and the
     * other does not are kept as-is; extents only in the other scale are adopted
     * when {@code adopt} is true; extents present in both are combined with the
     * requested union or intersection operator.
     *
     * @param scale the scale to merge with (must be a {@link Scale})
     * @param how   {@link LogicalConnector#UNION} or {@link LogicalConnector#INTERSECTION}
     * @param adopt if true, adopt extents that only the other scale has
     * @return the merged scale
     * @throws KlabException if an illegal operator is passed or extent merging fails
     */
    @Override
    public IScale merge(IScale scale, LogicalConnector how, boolean adopt) throws KlabException {

        Scale other = (Scale) scale;
        Scale ret = new Scale();

        ArrayList common = new ArrayList();
        HashSet commonConcepts = new HashSet();

        // keep our own extents; note which domains the scales share
        for (IExtent e : this) {
            if (other.getExtent(e.getDomainConcept()) != null) {
                common.add(e);
                commonConcepts.add(e.getDomainConcept());
            } else {
                ret.mergeExtent(e, true);
            }
        }

        if (adopt) {
            for (IExtent e : other) {
                // adopt extents only the other scale has
                if (ret.getExtent(e.getDomainConcept()) == null
                        && !commonConcepts.contains(e.getDomainConcept())) {
                    ret.mergeExtent(e, true);
                }
            }
        }

        // combine shared domains with the requested operator
        for (IExtent e : common) {
            IExtent oext = other.getExtent(e.getDomainConcept());
            IExtent merged = null;
            if (how.equals(LogicalConnector.INTERSECTION)) {
                merged = e.intersection(oext);
            } else if (how.equals(LogicalConnector.UNION)) {
                merged = e.union(oext);
            } else {
                throw new KlabValidationException("extents are being merged with illegal operator " + how);
            }
            ret.mergeExtent(merged, true);
        }

        return ret;
    }

    @Override
    public String toString() {
        StringBuilder ss = new StringBuilder();
        for (IExtent e : extents) {
            ss.append("<").append(e.getDomainConcept()).append(" # ").append(e.getMultiplicity())
                    .append(">");
        }
        return "Scale #" + extents.size() + " " + ss;
    }

    /**
     * A scale is empty if any of its extents is empty.
     */
    @Override
    public boolean isEmpty() {
        for (IExtent e : extents) {
            if (e.isEmpty()) {
                return true;
            }
        }
        return false;
    }

    @Override
    public MultidimensionalCursor getCursor() {
        return cursor;
    }

    @Override
    public IScale harmonize(IScale scale) throws KlabException {
        // TODO placeholder: returns the passed scale unharmonized
        return scale;
    }

    /**
     * Return a scale containing all our extents except the one with the passed
     * domain concept, remembering the sliced dimension and offset so that offsets
     * can be mapped back with {@link #getOriginalOffset(long)}. Returns this
     * scale unchanged if we have no such extent.
     */
    @Override
    public IScale getSubscale(IConcept extent, int offset) {

        int oridx = -1;
        ArrayList exts = new ArrayList<>();
        for (int i = 0; i < extents.size(); i++) {
            if (extents.get(i).getDomainConcept().equals(extent)) {
                oridx = i;
                continue;
            }
            exts.add(extents.get(i));
        }

        if (oridx < 0) {
            return this;
        }

        try {
            return new Scale(exts.toArray(new IExtent[exts.size()]), getCursor(), oridx, offset);
        } catch (KlabException e1) {
            // should never happen since we build it with previously accepted extents.
            throw new KlabRuntimeException(e1);
        }
    }

    /**
     * Map a linear offset in this (sliced) scale back to the corresponding offset
     * in the scale we were sliced from; identity when we are not a slice.
     */
    @Override
    public long getOriginalOffset(long subscaleOffset) {

        if (originalCursor == null) {
            return subscaleOffset;
        }

        // re-insert the fixed slice offset at the sliced dimension
        int[] slcofs = getCursor().getElementIndexes((int) subscaleOffset);
        int[] orgofs = new int[originalCursor.getDimensionsCount()];
        int on = 0;
        for (int i = 0; i < orgofs.length; i++) {
            orgofs[i] = i == sliceDimension ? sliceOffset : slcofs[on++];
        }
        return originalCursor.getElementOffset(orgofs);
    }

    /**
     * Take a scale from any origin and return another whose extents are
     * guaranteed to be the engine's implementation.
     *
     * @param scale
     * @return sanitized scale
     * @throws KlabException
     */
    public static IScale sanitize(IScale scale) throws KlabException {
        ArrayList extents = new ArrayList<>();
        for (IExtent e : scale) {
            if (e instanceof ISpatialExtent) {
                extents.add(SpaceExtent.sanitize((ISpatialExtent) e));
            } else if (e instanceof ITemporalExtent) {
                extents.add(RegularTemporalGrid.sanitize((ITemporalExtent) e));
            } else {
                /*
                 * nothing yet, but if anything, they should be simple enough to
                 * have one implementation.
                 */
                extents.add(e);
            }
        }
        return new Scale(extents.toArray(new IExtent[extents.size()]));
    }

    /**
     * Return a new scale equal to the passed one except that the extent sharing
     * the passed extent's domain concept is replaced by it.
     *
     * @param scale  the source scale
     * @param extent the replacement extent
     * @return a new scale with the extent substituted
     * @throws KlabException
     */
    public static IScale substituteExtent(IScale scale, IExtent extent) throws KlabException {
        List exts = new ArrayList<>();
        for (IExtent e : scale) {
            if (e.getDomainConcept().equals(extent.getDomainConcept())) {
                exts.add(extent);
            } else {
                exts.add(e);
            }
        }
        return new Scale(exts.toArray(new IExtent[exts.size()]));
    }

    /**
     * Rebuild the scale from its network bean, restoring space and time extents
     * independently when present.
     */
    @Override
    public void deserialize(IModelBean object) {

        if (!(object instanceof org.integratedmodelling.common.beans.Scale)) {
            throw new KlabRuntimeException("cannot deserialize a Scale from a "
                    + object.getClass().getCanonicalName());
        }

        org.integratedmodelling.common.beans.Scale bean = (org.integratedmodelling.common.beans.Scale) object;

        extents = new ArrayList<>();
        if (bean.getSpace() != null) {
            if (bean.getSpace().isForcing()) {
                extents.add(KLAB.MFACTORY
                        .adapt(bean.getSpace(), org.integratedmodelling.common.model.runtime.Space.class));
            } else {
                extents.add(KLAB.MFACTORY.adapt(bean.getSpace(), SpaceExtent.class));
            }
        }
        // FIX: this was 'else if', which silently dropped the time extent whenever
        // a space extent was present; a scale may carry both.
        if (bean.getTime() != null) {
            if (bean.getTime().isForcing()) {
                extents.add(KLAB.MFACTORY
                        .adapt(bean.getTime(), org.integratedmodelling.common.model.runtime.Time.class));
            } else {
                // TODO this may be another time extent - we should build a polymorphic
                // adapter in these.
                extents.add(KLAB.MFACTORY.adapt(bean.getTime(), RegularTemporalGrid.class));
            }
        }
        sort();
    }
}