// ucar.nc2.ft.point.standard.TableAnalyzer — retrieved from a Maven/Gradle/Ivy artifact listing (newest version)
/*
* Copyright (c) 1998-2018 John Caron and University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package ucar.nc2.ft.point.standard;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Structure;
import ucar.nc2.Variable;
import ucar.nc2.constants.AxisType;
import ucar.nc2.constants.CDM;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.CoordinateAxis;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.ft.FeatureDatasetFactoryManager;
import ucar.nc2.ft.point.standard.plug.BuoyShipSynop;
import ucar.nc2.ft.point.standard.plug.CFpointObs;
import ucar.nc2.ft.point.standard.plug.CFpointObsExt;
import ucar.nc2.ft.point.standard.plug.CdmDirect;
import ucar.nc2.ft.point.standard.plug.Cosmic;
import ucar.nc2.ft.point.standard.plug.FslRaob;
import ucar.nc2.ft.point.standard.plug.FslWindProfiler;
import ucar.nc2.ft.point.standard.plug.GempakCdm;
import ucar.nc2.ft.point.standard.plug.Iridl;
import ucar.nc2.ft.point.standard.plug.Jason;
import ucar.nc2.ft.point.standard.plug.Madis;
import ucar.nc2.ft.point.standard.plug.MadisAcars;
import ucar.nc2.ft.point.standard.plug.NdbcCoards;
import ucar.nc2.ft.point.standard.plug.Nldn;
import ucar.nc2.ft.point.standard.plug.RafNimbus;
import ucar.nc2.ft.point.standard.plug.SimpleTrajectory;
import ucar.nc2.ft.point.standard.plug.Suomi;
import ucar.nc2.ft.point.standard.plug.UnidataPointObs;
/**
* Analyzes the coordinate systems of a dataset to try to identify the Feature Type and the
* structure of the data.
* Used by PointDatasetStandardFactory.
*
* @author caron
* @since Mar 20, 2008
*/
public class TableAnalyzer {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(TableAnalyzer.class);
private static final List conventionList = new ArrayList<>();
private static boolean userMode;
private static final boolean debug = false;
// search in the order added
static {
registerAnalyzer("CDM", CdmDirect.class, null);
registerAnalyzer(CDM.CF_EXTENDED, CFpointObsExt.class, null);
registerAnalyzer("CF-1.", CFpointObs.class, (convName, wantName) -> {
return convName.startsWith(wantName); // && !convName.equals("CF-1.0"); // throw 1.0 to default analyser
});
registerAnalyzer("GEMPAK/CDM", GempakCdm.class, null);
registerAnalyzer("Unidata Observation Dataset v1.0", UnidataPointObs.class, null);
registerAnalyzer("Cosmic", Cosmic.class, null);
registerAnalyzer("Jason", Jason.class, null);
registerAnalyzer("FslWindProfiler", FslWindProfiler.class, null);
registerAnalyzer("MADIS-ACARS", MadisAcars.class, null); // must be before Madis
registerAnalyzer("MADIS surface observations, v1.0", Madis.class, null); // must be before FslRaob
registerAnalyzer("FSL Raobs", FslRaob.class, null); // must be before FslRaob
registerAnalyzer("IRIDL", Iridl.class, null);
registerAnalyzer("Ndbc", NdbcCoards.class, null);
registerAnalyzer("Suomi-Station-CDM", Suomi.class, null);
registerAnalyzer("BuoyShip-NetCDF", BuoyShipSynop.class, null);
registerAnalyzer("NCAR-RAF/nimbus", RafNimbus.class, null);
registerAnalyzer("NLDN-CDM", Nldn.class, null);
registerAnalyzer("SimpleTrajectory", SimpleTrajectory.class, null);
// further calls to registerConvention are by the user
userMode = true;
}
public static void registerAnalyzer(String conventionName, Class c, ConventionNameOk match) {
if (!(TableConfigurer.class.isAssignableFrom(c)))
throw new IllegalArgumentException("Class " + c.getName() + " must implement TableConfigurer");
// fail fast - check newInstance works
TableConfigurer tc;
try {
tc = (TableConfigurer) c.newInstance();
} catch (InstantiationException e) {
throw new IllegalArgumentException(
"TableConfigurer Class " + c.getName() + " cannot instantiate, probably need default Constructor");
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("TableConfigurer Class " + c.getName() + " is not accessible");
}
Configurator anal = new Configurator(conventionName, c, tc, match);
if (userMode) // user stuff gets put at top
conventionList.add(0, anal);
else
conventionList.add(anal);
}
private interface ConventionNameOk {
boolean isMatch(String convName, String wantName);
}
private static class Configurator {
String convName;
Class confClass;
TableConfigurer confInstance;
ConventionNameOk match;
Configurator(String convName, Class confClass, TableConfigurer confInstance, ConventionNameOk match) {
this.convName = convName;
this.confClass = confClass;
this.confInstance = confInstance;
this.match = match;
}
}
private static Configurator matchConfigurator(String convName) {
for (Configurator anal : conventionList) {
if ((anal.match == null) && anal.convName.equalsIgnoreCase(convName))
return anal;
if ((anal.match != null) && anal.match.isMatch(convName, anal.convName))
return anal;
}
return null;
}
/**
* Find a TableConfigurer for this dataset, if there is one.
*
* @param wantFeatureType want this FeatureType
* @param ds for this dataset
* @return TableConfigurer or null if not found
*/
public static TableConfigurer getTableConfigurer(FeatureType wantFeatureType, NetcdfDataset ds) {
String convUsed = null;
// search for the Conventions attribute
String convName = ds.getRootGroup().findAttributeString(CDM.CONVENTIONS, null);
if (convName == null)
convName = ds.getRootGroup().findAttributeString("Convention", null);
// now search for TableConfigurer using that Convention
Configurator anal = null;
if (convName != null) {
convName = convName.trim();
// search for Convention parsing class
anal = matchConfigurator(convName);
if (anal != null) {
convUsed = convName;
if (debug)
System.out.println(" TableConfigurer found using convName " + convName);
}
// now search for comma or semicolon or / delimited list
if (anal == null) {
List names = new ArrayList<>();
if ((convName.indexOf(',') > 0) || (convName.indexOf(';') > 0)) {
StringTokenizer stoke = new StringTokenizer(convName, ",;");
while (stoke.hasMoreTokens()) {
String name = stoke.nextToken();
names.add(name.trim());
}
} else if ((convName.indexOf('/') > 0)) {
StringTokenizer stoke = new StringTokenizer(convName, "/");
while (stoke.hasMoreTokens()) {
String name = stoke.nextToken();
names.add(name.trim());
}
}
if (!names.isEmpty()) {
// search the registered conventions, in order
for (Configurator conv : conventionList) {
for (String name : names) {
if (name.equalsIgnoreCase(conv.convName)) {
anal = conv;
convUsed = name;
if (debug)
System.out.println(" TableConfigurer found using convName " + convName);
}
}
if (anal != null)
break;
}
}
}
}
// search for ones that dont use Convention attribute, in order added.
// call method isMine() using reflection.
if (anal == null) {
for (Configurator conv : conventionList) {
Class c = conv.confClass;
Method isMineMethod;
try {
isMineMethod = c.getMethod("isMine", FeatureType.class, NetcdfDataset.class);
} catch (NoSuchMethodException ex) {
continue;
}
try {
Boolean result = (Boolean) isMineMethod.invoke(conv.confInstance, wantFeatureType, ds);
if (debug)
System.out.println(" TableConfigurer.isMine " + c.getName() + " result = " + result);
if (result) {
anal = conv;
convUsed = conv.convName;
break;
}
} catch (Exception ex) {
log.error("Class {} exception invoking isMine method", c.getName(), ex);
}
}
}
// Instantiate a new TableConfigurer object
TableConfigurer tc = null;
if (anal != null) {
try {
tc = (TableConfigurer) anal.confClass.newInstance();
tc.setConvName(convName);
tc.setConvUsed(convUsed);
} catch (InstantiationException | IllegalAccessException e) {
log.error("TableConfigurer create failed", e);
}
}
return tc;
}
/**
* Create a TableAnalyser for this dataset with the given TableConfigurer
*
* @param tc TableConfigurer, may be null.
* @param wantFeatureType want this FeatureType
* @param ds for this dataset
* @return TableAnalyser
* @throws IOException on read error
*/
public static TableAnalyzer factory(TableConfigurer tc, FeatureType wantFeatureType, NetcdfDataset ds)
throws IOException {
// Create a TableAnalyzer with this TableConfigurer (may be null)
TableAnalyzer analyzer = new TableAnalyzer(ds, tc);
if (tc != null) {
if (tc.getConvName() == null)
analyzer.userAdvice.format(" No 'Conventions' global attribute.%n");
else
analyzer.userAdvice.format(" Conventions global attribute = %s %n", tc.getConvName());
// add the convention name used
if (tc.getConvUsed() != null) {
analyzer.setConventionUsed(tc.getConvUsed());
if (!tc.getConvUsed().equals(tc.getConvName()))
analyzer.userAdvice.format(" TableConfigurer used = " + tc.getConvUsed() + ".%n");
}
} else {
analyzer.userAdvice.format(" No TableConfigurer found, using default analysis.%n");
}
// construct the nested table object
analyzer.analyze(wantFeatureType);
return analyzer;
}
////////////////////////////////////////////////////////////////////////////////////////////////
private TableConfigurer tc;
private NetcdfDataset ds;
private Map tableFind = new HashMap<>();
private Set tableSet = new HashSet<>();
private List leaves = new ArrayList<>();
private FeatureType ft;
private TableConfig configResult;
private TableAnalyzer(NetcdfDataset ds, TableConfigurer tc) {
this.tc = tc;
this.ds = ds;
if (tc == null)
userAdvice.format("Using default TableConfigurer.%n");
}
public List getFlatTables() {
return leaves;
}
public boolean featureTypeOk(FeatureType ftype, Formatter errlog) {
for (NestedTable nt : leaves) {
if (!nt.hasCoords()) {
errlog.format("Table %s featureType %s: lat/lon/time coord not found%n", nt.getName(), nt.getFeatureType());
writeConfigXML(errlog);
}
if (!FeatureDatasetFactoryManager.featureTypeOk(ftype, nt.getFeatureType()))
errlog.format("Table %s featureType %s doesnt match desired type %s%n", nt.getName(), nt.getFeatureType(),
ftype);
if (nt.hasCoords() && FeatureDatasetFactoryManager.featureTypeOk(ftype, nt.getFeatureType()))
return true;
}
return false;
}
public String getName() {
if (tc != null)
return tc.getClass().getName();
return "Default";
}
// for debugging messages
public FeatureType getFirstFeatureType() {
for (NestedTable nt : leaves) {
if (nt.hasCoords())
return nt.getFeatureType();
}
return null;
}
public NetcdfDataset getNetcdfDataset() {
return ds;
}
private Formatter userAdvice = new Formatter();
private Formatter errlog = new Formatter();
public String getUserAdvice() {
return userAdvice.toString();
}
public String getErrlog() {
return errlog.toString();
}
private String conventionName;
private void setConventionUsed(String convName) {
this.conventionName = convName;
}
TableConfig getTableConfig() {
return this.configResult;
}
TableConfigurer getTableConfigurer() {
return tc;
}
/////////////////////////////////////////////////////////
/**
* Make a NestedTable object for the dataset.
*
* @param wantFeatureType want this FeatureType
* @throws IOException on read error
*/
private void analyze(FeatureType wantFeatureType) throws IOException {
// for netcdf-3 files, convert record dimension to structure
// LOOK may be problems when served via opendap.
// LOOK this wont work in ver6.
boolean structAdded = (Boolean) ds.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);
if (tc == null) {
makeTablesDefault(structAdded);
makeNestedTables();
} else {
configResult = tc.getConfig(wantFeatureType, ds, errlog);
if (configResult != null)
addTableRecurse(configResult); // kinda stupid
else { // use default
makeTablesDefault(structAdded);
makeNestedTables();
}
}
// find the leaves
for (TableConfig config : tableSet) {
if (config.children == null) { // its a leaf
NestedTable flatTable = new NestedTable(ds, config, errlog);
leaves.add(flatTable);
}
}
}
private void addTable(TableConfig t) {
tableFind.put(t.name, t);
if (t.dimName != null)
tableFind.put(t.dimName, t);
tableSet.add(t);
}
private void addTableRecurse(TableConfig t) {
addTable(t);
if (t.children != null) {
for (TableConfig child : t.children)
addTableRecurse(child);
}
}
///////////////////////////////////////////////////////////
// default analasis aka guessing
// no TableConfig was passed in - gotta wing it
private void makeTablesDefault(boolean structAdded) {
// make Structures into a table
List vars = new ArrayList<>(ds.getVariables());
Iterator iter = vars.iterator();
while (iter.hasNext()) {
Variable v = iter.next();
if (v instanceof Structure) { // handles Sequences too
TableConfig st = new TableConfig(Table.Type.Structure, v.getFullName());
CoordSysEvaluator.findCoords(st, ds, null);
st.structName = v.getFullName();
st.nestedTableName = v.getShortName();
addTable(st);
checkIfTrajectory(st);
iter.remove();
findNestedStructures((Structure) v, st); // look for nested structures
} else if (structAdded && v.isUnlimited()) {
iter.remove();
}
}
if (!tableSet.isEmpty())
return;
// search at dimensions that lat, lon, time coordinates use
Set dimSet = new HashSet<>(10);
for (CoordinateAxis axis : ds.getCoordinateAxes()) {
if ((axis.getAxisType() == AxisType.Lat) || (axis.getAxisType() == AxisType.Lon)
|| (axis.getAxisType() == AxisType.Time))
dimSet.addAll(axis.getDimensions());
}
// lat, lon, time all use same dimension - use it
if (dimSet.size() == 1) {
Dimension obsDim = (Dimension) dimSet.toArray()[0];
TableConfig st = new TableConfig(Table.Type.Structure, obsDim.getShortName());
st.structureType =
obsDim.isUnlimited() ? TableConfig.StructureType.Structure : TableConfig.StructureType.PsuedoStructure;
st.structName = obsDim.isUnlimited() ? "record" : obsDim.getShortName();
st.dimName = obsDim.getShortName();
CoordSysEvaluator.findCoords(st, ds, axis -> obsDim.equals(axis.getDimension(0)));
CoordinateAxis time = CoordSysEvaluator.findCoordByType(ds, AxisType.Time);
if ((time != null) && (time.getRank() == 0)) {
st.addJoin(new JoinArray(time, JoinArray.Type.scalar, 0));
st.time = time.getShortName();
}
addTable(st);
checkIfTrajectory(st);
}
if (!tableSet.isEmpty())
return;
// try the time dimension
CoordinateAxis time = null;
for (CoordinateAxis axis : ds.getCoordinateAxes()) {
if ((axis.getAxisType() == AxisType.Time) && axis.isIndependentCoordinate()) {
time = axis;
break;
}
}
if (time != null) {
Dimension obsDim = time.getDimension(0);
TableConfig st = new TableConfig(Table.Type.Structure, obsDim.getShortName());
st.structureType = TableConfig.StructureType.PsuedoStructure;
st.dimName = obsDim.getShortName();
CoordSysEvaluator.findCoords(st, ds, null);
addTable(st);
}
}
private void checkIfTrajectory(TableConfig st) {
// deal with possible trajectory - only do this if dataset has metadata
FeatureType ft = FeatureDatasetFactoryManager.findFeatureType(ds);
if (ft == FeatureType.TRAJECTORY) {
st.featureType = FeatureType.TRAJECTORY;
TableConfig pc = new TableConfig(Table.Type.Top, "single");
st.parent = pc;
pc.addChild(st);
} else
st.featureType = FeatureType.POINT;
}
private void findNestedStructures(Structure s, TableConfig parent) {
for (Variable v : s.getVariables()) {
if (v instanceof Structure) { // handles Sequences too
TableConfig nestedTable = new TableConfig(Table.Type.NestedStructure, v.getFullName());
nestedTable.structName = v.getFullName();
nestedTable.nestedTableName = v.getShortName();
addTable(nestedTable);
parent.addChild(nestedTable);
// LOOK why not add the join(parent,child) here ?
// nestedTable.join = new TableConfig.JoinConfig(Join.Type.NestedStructure);
// joins.add(nestedTable.join);
findNestedStructures((Structure) v, nestedTable); // search for nested structures
}
}
}
private void makeNestedTables() {
// We search among all the possible Tables in a dataset for joins, and coordinate
// variables. Based on those, we form "interesting" sets and make them into NestedTables.
/*
* link the tables together with joins
* for (TableConfig.JoinConfig join : joins) {
* NestedTable.Table parent = join.parent;
* NestedTable.Table child = join.child;
*
* if (child.parent != null) throw new IllegalStateException("Multiple parents");
* child.parent = parent;
* child.join = join;
*
* if (parent.children == null) parent.children = new ArrayList();
* parent.children.add(join);
* }
*/
}
/////////////////////////////////////////////////////
/*
* track station info
*
* private StationInfo stationInfo = new StationInfo();
*
* private StationInfo getStationInfo() {
* return stationInfo;
* }
*
* public class StationInfo {
* public String stationId, stationDesc, stationNpts;
* public int nstations;
* public String latName, lonName, elevName;
* }
*/
public void showNestedTables(java.util.Formatter sf) {
for (NestedTable nt : leaves) {
nt.show(sf);
}
}
public String getImplementationName() {
return (tc != null) ? tc.getClass().getSimpleName() : "defaultAnalyser";
}
public void getDetailInfo(java.util.Formatter sf) {
sf.format("-----------------------------------------------------%nTableAnalyzer on Dataset %s%n", ds.getLocation());
sf.format(" TableAnalyser = %s%n", getName());
showNestedTables(sf);
String errlogS = errlog.toString();
if (!errlogS.isEmpty())
sf.format("%n Errlog=%n%s", errlogS);
String userAdviceS = userAdvice.toString();
if (!userAdviceS.isEmpty())
sf.format("%n userAdvice=%n%s%n", userAdviceS);
writeConfigXML(sf);
}
private void writeConfigXML(java.util.Formatter sf) {
if (configResult != null) {
PointConfigXML tcx = new PointConfigXML();
tcx.writeConfigXML(configResult, getName(), sf);
return;
}
XMLOutputter fmt = new XMLOutputter(Format.getPrettyFormat());
sf.format("%s", fmt.outputString(makeDocument()));
}
/**
* Create an XML document from this info
*
* @return netcdfDatasetInfo XML document
*/
private Document makeDocument() {
Element rootElem = new Element("featureDataset");
Document doc = new Document(rootElem);
rootElem.addContent(new Element("analyser").setAttribute("class", getName()));
if (ft != null)
rootElem.setAttribute("featureType", ft.toString());
for (NestedTable nt : leaves) {
writeTable(rootElem, nt.getLeaf());
}
return doc;
}
private Element writeTable(Element parent, Table table) {
if (table.parent != null) {
parent = writeTable(parent, table.parent);
}
Element tableElem = new Element("table");
parent.addContent(tableElem);
if (table.getName() != null)
tableElem.setAttribute("name", table.getName());
if (table.getFeatureType() != null)
tableElem.setAttribute("featureType", table.getFeatureType().toString());
tableElem.setAttribute("class", table.getClass().toString());
addCoordinates(tableElem, table);
for (String colName : table.cols.keySet()) {
if (!table.nondataVars.contains(colName))
tableElem.addContent(new Element("variable").addContent(colName));
}
if (table.extraJoins != null) {
for (Join j : table.extraJoins) {
if (j instanceof JoinArray)
tableElem.addContent(writeJoinArray((JoinArray) j));
else if (j instanceof JoinMuiltdimStructure)
tableElem.addContent(writeJoinMuiltdimStructure((JoinMuiltdimStructure) j));
else if (j instanceof JoinParentIndex)
tableElem.addContent(writeJoinParentIndex((JoinParentIndex) j));
}
}
return tableElem;
}
private void addCoordinates(Element tableElem, Table table) {
addCoord(tableElem, table.lat, "lat");
addCoord(tableElem, table.lon, "lon");
addCoord(tableElem, table.elev, "elev");
addCoord(tableElem, table.time, "time");
addCoord(tableElem, table.timeNominal, "timeNominal");
addCoord(tableElem, table.stnId, "stnId");
addCoord(tableElem, table.stnDesc, "stnDesc");
addCoord(tableElem, table.stnNpts, "stnNpts");
addCoord(tableElem, table.stnWmoId, "stnWmoId");
addCoord(tableElem, table.stnAlt, "stnAlt");
addCoord(tableElem, table.limit, "limit");
}
private void addCoord(Element tableElem, String name, String kind) {
if (name != null) {
Element elem = new Element("coordinate").setAttribute("kind", kind);
elem.addContent(name);
tableElem.addContent(elem);
}
}
private Element writeJoinArray(JoinArray join) {
Element joinElem = new Element("join");
joinElem.setAttribute("class", join.getClass().toString());
if (join.type != null)
joinElem.setAttribute("type", join.type.toString());
if (join.v != null)
joinElem.addContent(new Element("variable").setAttribute("name", join.v.getFullName()));
joinElem.addContent(new Element("param").setAttribute("value", Integer.toString(join.param)));
return joinElem;
}
private Element writeJoinMuiltdimStructure(JoinMuiltdimStructure join) {
Element joinElem = new Element("join");
joinElem.setAttribute("class", join.getClass().toString());
if (join.parentStructure != null)
joinElem.addContent(new Element("parentStructure").setAttribute("name", join.parentStructure.getFullName()));
joinElem.addContent(new Element("dimLength").setAttribute("value", Integer.toString(join.dimLength)));
return joinElem;
}
private Element writeJoinParentIndex(JoinParentIndex join) {
Element joinElem = new Element("join");
joinElem.setAttribute("class", join.getClass().toString());
if (join.parentStructure != null)
joinElem.addContent(new Element("parentStructure").setAttribute("name", join.parentStructure.getFullName()));
if (join.parentIndex != null)
joinElem.addContent(new Element("parentIndex").setAttribute("name", join.parentIndex));
return joinElem;
}
}
// © 2015 - 2025 Weber Informatics LLC (hosting-site footer, not part of the original source)