/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* EditableBayesNet.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.bayes.net;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.StringTokenizer;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import weka.classifiers.bayes.BayesNet;
import weka.classifiers.bayes.net.estimate.DiscreteEstimatorBayes;
import weka.core.Attribute;
import weka.core.Instances;
import weka.core.RevisionUtils;
import weka.core.SerializedObject;
import weka.estimators.Estimator;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Reorder;
/**
* Bayes Network learning using various search
* algorithms and quality measures.
* Base class for a Bayes Network classifier. Provides datastructures (network
* structure, conditional probability distributions, etc.) and facilities common
* to Bayes Network learning algorithms like K2 and B.
*
* For more information see:
*
* http://www.cs.waikato.ac.nz/~remco/weka.pdf
*
*
*
* Valid options are:
*
*
*
*
*
*
* @author Remco Bouckaert ([email protected])
* @version $Revision: 10153 $
*/
public class EditableBayesNet extends BayesNet {
/** for serialization */
static final long serialVersionUID = 746037443258735954L;
/** location of nodes, used for graph drawing * */
// NOTE(review): raw lists; elements appear to be boxed int coordinates
// (see the add(int) calls in the constructors) — confirm
protected ArrayList m_nPositionX;
protected ArrayList m_nPositionY;
/** marginal distributions; one double[] per node, indexed by node value * */
protected ArrayList m_fMarginP;
/** evidence values, used for evidence propagation; initialized to -1 by the
 * constructors (i.e. no evidence set) * */
protected ArrayList m_nEvidence;
/** standard constructor: creates an empty editable network * */
public EditableBayesNet() {
  super();
  // no nodes yet, so all bookkeeping lists start empty
  m_nPositionX = new ArrayList();
  m_nPositionY = new ArrayList();
  m_nEvidence = new ArrayList(0);
  m_fMarginP = new ArrayList(0);
  clearUndoStack();
} // c'tor
/**
 * constructor, creates empty network with nodes based on the attributes in a
 * data set
 *
 * @param instances data set whose attributes define the nodes of the network
 */
public EditableBayesNet(Instances instances) {
try {
// default the class attribute to the last one when none is set
if (instances.classIndex() < 0) {
instances.setClassIndex(instances.numAttributes() - 1);
}
m_Instances = normalizeDataSet(instances);
} catch (Exception e) {
// best effort: keep going with whatever state we have
e.printStackTrace();
}
int nNodes = getNrOfNodes();
// every node starts without parents
m_ParentSets = new ParentSet[nNodes];
for (int i = 0; i < nNodes; i++) {
m_ParentSets[i] = new ParentSet();
}
// one estimator per node with a 0.5 prior count per value
m_Distributions = new Estimator[nNodes][];
for (int iNode = 0; iNode < nNodes; iNode++) {
m_Distributions[iNode] = new Estimator[1];
m_Distributions[iNode][0] = new DiscreteEstimatorBayes(
getCardinality(iNode), 0.5);
}
// -1 = no evidence set for the node
m_nEvidence = new ArrayList(nNodes);
for (int i = 0; i < nNodes; i++) {
m_nEvidence.add(-1);
}
// marginals start as all-zero arrays, one slot per node value
m_fMarginP = new ArrayList(nNodes);
for (int i = 0; i < nNodes; i++) {
double[] P = new double[getCardinality(i)];
m_fMarginP.add(P);
}
// lay nodes out on a 10-column grid, 50 pixels apart
m_nPositionX = new ArrayList(nNodes);
m_nPositionY = new ArrayList(nNodes);
for (int iNode = 0; iNode < nNodes; iNode++) {
m_nPositionX.add(iNode % 10 * 50);
m_nPositionY.add((iNode / 10) * 50);
}
} // c'tor
/**
 * constructor, copies Bayesian network structure from a Bayesian network
 * encapsulated in a BIFReader
 *
 * @param other BIFReader to take structure, distributions and layout from
 */
public EditableBayesNet(BIFReader other) {
// share the reader's structures rather than deep-copying them
m_Instances = other.m_Instances;
m_ParentSets = other.getParentSets();
m_Distributions = other.getDistributions();
int nNodes = getNrOfNodes();
// copy the node layout from the reader
// NOTE(review): other.m_nPositionX is indexed with [] so it is presumably
// an int[] in BIFReader — confirm against that class
m_nPositionX = new ArrayList(nNodes);
m_nPositionY = new ArrayList(nNodes);
for (int i = 0; i < nNodes; i++) {
m_nPositionX.add(other.m_nPositionX[i]);
m_nPositionY.add(other.m_nPositionY[i]);
}
// -1 = no evidence set
m_nEvidence = new ArrayList(nNodes);
for (int i = 0; i < nNodes; i++) {
m_nEvidence.add(-1);
}
// marginals start as all-zero arrays, one slot per node value
m_fMarginP = new ArrayList(nNodes);
for (int i = 0; i < nNodes; i++) {
double[] P = new double[getCardinality(i)];
m_fMarginP.add(P);
}
clearUndoStack();
} // c'tor
/**
 * constructor that potentially initializes instances as well
 *
 * @param bSetInstances flag indicating whether to initialize instances or not
 */
public EditableBayesNet(boolean bSetInstances) {
  super();
  // empty bookkeeping lists; the network has no nodes yet
  m_nPositionX = new ArrayList();
  m_nPositionY = new ArrayList();
  m_nEvidence = new ArrayList(0);
  m_fMarginP = new ArrayList(0);
  clearUndoStack();
  if (bSetInstances) {
    // attach an empty data set holding no attributes and no instances
    m_Instances = new Instances("New Network", new ArrayList(0), 0);
  }
} // c'tor
/**
 * Assuming a network structure is defined and we want to learn from data, the
 * data set must be put in correct order first and possibly
 * discretized/missing values filled in before proceeding to CPT learning.
 *
 * @param instances data set to learn from
 * @exception Exception when data sets are not compatible, e.g., a variable is
 *              missing or a variable has different nr of values.
 */
public void setData(Instances instances) throws Exception {
  // the node count is constant during this method; hoist it out of the
  // loop bounds instead of calling getNrOfNodes() on every iteration
  final int nNodes = getNrOfNodes();
  // sync order of variables: map each node to the attribute with its name
  int[] order = new int[nNodes];
  for (int iNode = 0; iNode < nNodes; iNode++) {
    String sName = getNodeName(iNode);
    int nNode = 0;
    while (nNode < nNodes
      && !sName.equals(instances.attribute(nNode).name())) {
      nNode++;
    }
    if (nNode >= nNodes) {
      throw new Exception("Cannot find node named [[[" + sName
        + "]]] in the data");
    }
    order[iNode] = nNode;
  }
  Reorder reorderFilter = new Reorder();
  reorderFilter.setAttributeIndicesArray(order);
  reorderFilter.setInputFormat(instances);
  instances = Filter.useFilter(instances, reorderFilter);
  // filter using discretization/missing values filter
  Instances newInstances = new Instances(m_Instances, 0);
  if (m_DiscretizeFilter == null && m_MissingValuesFilter == null) {
    newInstances = normalizeDataSet(instances);
  } else {
    for (int iInstance = 0; iInstance < instances.numInstances(); iInstance++) {
      newInstances.add(normalizeInstance(instances.instance(iInstance)));
    }
  }
  // sanity check: every (possibly discretized) attribute must still have the
  // same number of values as the corresponding network node
  for (int iNode = 0; iNode < nNodes; iNode++) {
    if (newInstances.attribute(iNode).numValues() != getCardinality(iNode)) {
      throw new Exception("Number of values of node [[[" + getNodeName(iNode)
        + "]]] differs in (discretized) dataset.");
    }
  }
  // if we got this far, all is ok with the data set and
  // we can replace data set of Bayes net
  m_Instances = newInstances;
} // setData
/**
 * returns index of node with given name, or -1 if no such node exists
 *
 * @param sNodeName name of the node to get index for
 * @return attribute index of the node, or -1 when not found
 */
public int getNode2(String sNodeName) {
  for (int iNode = 0; iNode < m_Instances.numAttributes(); iNode++) {
    if (m_Instances.attribute(iNode).name().equals(sNodeName)) {
      return iNode;
    }
  }
  return -1;
} // getNode2
/**
 * returns index of node with given name. Throws exception if no such node
 * exists
 *
 * @param sNodeName name of the node to get index for
 * @return index of the node
 * @throws Exception when no node carries the given name
 */
public int getNode(String sNodeName) throws Exception {
  final int iNode = getNode2(sNodeName);
  if (iNode >= 0) {
    return iNode;
  }
  throw new Exception("Could not find node [[" + sNodeName + "]]");
} // getNode
/**
 * Add new node to the network, initializing instances, parentsets,
 * distributions. Used for manual manipulation of the Bayesian network.
 *
 * @param sName name of the node. If the name already exists, an x is appended
 *          to the name
 * @param nCardinality number of values for this node
 * @throws Exception
 */
public void addNode(String sName, int nCardinality) throws Exception {
  // pick a default position that shifts with the current node count
  final int nDefaultPos = 100 + getNrOfNodes() * 10;
  addNode(sName, nCardinality, nDefaultPos, nDefaultPos);
} // addNode
/**
 * Add node to network at a given position, initializing instances,
 * parentsets, distributions. Used for manual manipulation of the Bayesian
 * network.
 *
 * @param sName name of the node. If the name already exists, an x is appended
 *          to the name
 * @param nCardinality number of values for this node
 * @param nPosX x-coordinate of the position to place this node
 * @param nPosY y-coordinate of the position to place this node
 * @throws Exception
 */
public void addNode(String sName, int nCardinality, int nPosX, int nPosY)
  throws Exception {
  if (getNode2(sName) >= 0) {
    // name clash: append an x and retry. Fix: keep the requested position
    // (the previous code fell back to the 2-argument overload, silently
    // discarding nPosX/nPosY)
    addNode(sName + "x", nCardinality, nPosX, nPosY);
    return;
  }
  // update instances: new nominal attribute with default value names
  ArrayList<String> values = new ArrayList<String>(nCardinality);
  for (int iValue = 0; iValue < nCardinality; iValue++) {
    values.add("Value" + (iValue + 1));
  }
  Attribute att = new Attribute(sName, values);
  m_Instances.insertAttributeAt(att, m_Instances.numAttributes());
  int nAtts = m_Instances.numAttributes();
  // update parentsets: the new node starts without parents
  ParentSet[] parentSets = new ParentSet[nAtts];
  System.arraycopy(m_ParentSets, 0, parentSets, 0, nAtts - 1);
  parentSets[nAtts - 1] = new ParentSet();
  m_ParentSets = parentSets;
  // update distributions: single estimator with a 0.5 prior per value
  Estimator[][] distributions = new Estimator[nAtts][];
  System.arraycopy(m_Distributions, 0, distributions, 0, nAtts - 1);
  distributions[nAtts - 1] = new Estimator[1];
  distributions[nAtts - 1][0] = new DiscreteEstimatorBayes(nCardinality, 0.5);
  m_Distributions = distributions;
  // update positions
  m_nPositionX.add(nPosX);
  m_nPositionY.add(nPosY);
  // update evidence & margins: no evidence, uniform marginal
  m_nEvidence.add(-1);
  double[] fMarginP = new double[nCardinality];
  for (int iValue = 0; iValue < nCardinality; iValue++) {
    fMarginP[iValue] = 1.0 / nCardinality;
  }
  m_fMarginP.add(fMarginP);
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new AddNodeAction(sName, nCardinality, nPosX, nPosY));
  }
} // addNode
/**
 * Delete node from the network, updating instances, parentsets, distributions
 * Conditional distributions are condensed by taking the values for the target
 * node to be its first value. Used for manual manipulation of the Bayesian
 * network.
 *
 * @param sName name of the node. If the name does not exists an exception is
 *          thrown
 * @throws Exception
 */
public void deleteNode(String sName) throws Exception {
  // resolve the name, then delegate to the index-based variant
  deleteNode(getNode(sName));
} // deleteNode
/**
 * Delete node from the network, updating instances, parentsets, distributions
 * Conditional distributions are condensed by taking the values for the target
 * node to be its first value. Used for manual manipulation of the Bayesian
 * network.
 *
 * @param nTargetNode index of the node to delete.
 * @throws Exception
 */
public void deleteNode(int nTargetNode) throws Exception {
// update undo stack
if (m_bNeedsUndoAction) {
addUndoAction(new DeleteNodeAction(nTargetNode));
}
// nAtts = number of nodes remaining after the deletion
int nAtts = m_Instances.numAttributes() - 1;
int nTargetCard = m_Instances.attribute(nTargetNode).numValues();
// update distributions: copy all, skipping the deleted node; iNode2 is the
// old index corresponding to new index iNode
Estimator[][] distributions = new Estimator[nAtts][];
for (int iNode = 0; iNode < nAtts; iNode++) {
int iNode2 = iNode;
if (iNode >= nTargetNode) {
iNode2++;
}
Estimator[] distribution = m_Distributions[iNode2];
if (m_ParentSets[iNode2].contains(nTargetNode)) {
// condense distribution, use values for targetnode = 0
int nParentCard = m_ParentSets[iNode2].getCardinalityOfParents();
nParentCard = nParentCard / nTargetCard;
Estimator[] distribution2 = new Estimator[nParentCard];
for (int iParent = 0; iParent < nParentCard; iParent++) {
distribution2[iParent] = distribution[iParent];
}
distribution = distribution2;
}
distributions[iNode] = distribution;
}
m_Distributions = distributions;
// update parentsets: remove the node as a parent and shift down all parent
// indexes above the deleted node
ParentSet[] parentSets = new ParentSet[nAtts];
for (int iParentSet = 0; iParentSet < nAtts; iParentSet++) {
int iParentSet2 = iParentSet;
if (iParentSet >= nTargetNode) {
iParentSet2++;
}
ParentSet parentset = m_ParentSets[iParentSet2];
parentset.deleteParent(nTargetNode, m_Instances);
for (int iParent = 0; iParent < parentset.getNrOfParents(); iParent++) {
int nParent = parentset.getParent(iParent);
if (nParent > nTargetNode) {
parentset.SetParent(iParent, nParent - 1);
}
}
parentSets[iParentSet] = parentset;
}
m_ParentSets = parentSets;
// update instances: clear the class index first so the attribute can be
// deleted, then restore it to the (new) last attribute
m_Instances.setClassIndex(-1);
m_Instances.deleteAttributeAt(nTargetNode);
m_Instances.setClassIndex(nAtts - 1);
// update positions
m_nPositionX.remove(nTargetNode);
m_nPositionY.remove(nTargetNode);
// update evidence & margins
m_nEvidence.remove(nTargetNode);
m_fMarginP.remove(nTargetNode);
} // deleteNode
/**
 * Delete nodes with indexes in selection from the network, updating
 * instances, parentsets, distributions Conditional distributions are
 * condensed by taking the values for the target node to be its first value.
 * Used for manual manipulation of the Bayesian network.
 *
 * @param nodes array of indexes of nodes to delete.
 */
public void deleteSelection(ArrayList<Integer> nodes) {
  // sort ascending so the reverse traversal below deletes the highest index
  // first, keeping the remaining indexes valid. The raw-type bubble sort
  // (Object > Object) did not compile; use the parameterized list and the
  // library sort instead.
  Collections.sort(nodes);
  // update undo stack: the whole selection is undone as a single action
  if (m_bNeedsUndoAction) {
    addUndoAction(new DeleteSelectionAction(nodes));
  }
  // suppress per-node undo actions during the individual deletions
  boolean bNeedsUndoAction = m_bNeedsUndoAction;
  m_bNeedsUndoAction = false;
  try {
    for (int iNode = nodes.size() - 1; iNode >= 0; iNode--) {
      deleteNode(nodes.get(iNode).intValue());
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  m_bNeedsUndoAction = bNeedsUndoAction;
} // deleteSelection
/**
 * XML helper function for selecting elements under a node with a given name
 *
 * @param item XMLNode to select items from
 * @param sElement name of the element to return
 * @return list of matching child element nodes (typed; the raw return type
 *         broke callers that assign {@code list.get(0)} to a {@code Node})
 * @throws Exception
 */
ArrayList<Node> selectElements(Node item, String sElement) throws Exception {
  NodeList children = item.getChildNodes();
  ArrayList<Node> nodelist = new ArrayList<Node>();
  for (int iNode = 0; iNode < children.getLength(); iNode++) {
    Node node = children.item(iNode);
    // keep only ELEMENT children whose tag name matches
    if ((node.getNodeType() == Node.ELEMENT_NODE)
      && node.getNodeName().equals(sElement)) {
      nodelist.add(node);
    }
  }
  return nodelist;
} // selectElements
/**
 * XML helper function. Returns all TEXT children of the given node in one
 * string. Between the node values new lines are inserted.
 *
 * @param node the node to return the content for
 * @return the content of the node (each TEXT child preceded by a newline)
 */
public String getContent(Element node) {
  // use a StringBuilder instead of repeated String concatenation in the loop
  StringBuilder result = new StringBuilder();
  NodeList list = node.getChildNodes();
  for (int i = 0; i < list.getLength(); i++) {
    Node item = list.item(i);
    if (item.getNodeType() == Node.TEXT_NODE) {
      result.append('\n').append(item.getNodeValue());
    }
  }
  return result.toString();
}
/**
 * XML helper function that returns DEFINITION element from a XMLBIF document
 * for a node with a given name.
 *
 * @param doc XMLBIF document
 * @param sName name of the node to get the definition for
 * @return the DEFINITION element whose FOR child names the node
 * @throws Exception when no definition for the node exists
 */
Element getDefinition(Document doc, String sName) throws Exception {
  NodeList nodelist = doc.getElementsByTagName("DEFINITION");
  for (int iNode = 0; iNode < nodelist.getLength(); iNode++) {
    Node node = nodelist.item(iNode);
    // the raw list made list.get(0) an Object, which did not compile when
    // assigned to a Node; parameterize the local instead
    ArrayList<Node> list = selectElements(node, "FOR");
    if (list.size() > 0) {
      Node forNode = list.get(0);
      if (getContent((Element) forNode).trim().equals(sName)) {
        return (Element) node;
      }
    }
  }
  throw new Exception("Could not find definition for ((" + sName + "))");
} // getDefinition
/**
 * Paste modes. This allows for verifying that a paste action does not cause
 * any problems before actually performing the paste operation.
 */
// dry run: validate the fragment without modifying the network
final static int TEST = 0;
// apply the fragment to the network
final static int EXECUTE = 1;
/**
 * Apply paste operation with XMLBIF fragment. This adds nodes in the XMLBIF
 * fragment to the network, together with its parents. First, paste in test
 * mode to verify no problems occur, then execute paste operation. If a
 * problem occurs (e.g. parent does not exist) then a exception is thrown.
 *
 * @param sXML XMLBIF fragment to paste into the network
 * @throws Exception when the fragment cannot be applied cleanly
 */
public void paste(String sXML) throws Exception {
  // dry run first: throws (propagating to the caller) if the fragment
  // cannot be applied, leaving the network untouched. The previous
  // try { ... } catch (Exception e) { throw e; } was a no-op wrapper.
  paste(sXML, TEST);
  paste(sXML, EXECUTE);
} // paste
/**
 * Apply paste operation with XMLBIF fragment. Depending on the paste mode,
 * the nodes are actually added to the network or it is just tested that the
 * nodes can be added to the network.
 *
 * @param sXML XMLBIF fragment to paste into the network
 * @param mode paste mode TEST or EXECUTE
 * @throws Exception when the fragment cannot be parsed, a variable lacks a
 *           name, or a referenced parent node does not exist
 */
void paste(String sXML, int mode) throws Exception {
// NOTE(review): the parser validates but external entities are not
// disabled; if sXML can come from untrusted sources this is an XXE risk —
// consider disabling DTDs/external entities on the factory
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setValidating(true);
Document doc = factory.newDocumentBuilder().parse(
new org.xml.sax.InputSource(new StringReader(sXML)));
doc.normalize();
// create nodes first
NodeList nodelist = doc.getElementsByTagName("VARIABLE");
ArrayList sBaseNames = new ArrayList();
// work on a copy of the header so TEST mode leaves m_Instances untouched
Instances instances = new Instances(m_Instances, 0);
// nBase = number of nodes already in the network before the paste
int nBase = instances.numAttributes();
for (int iNode = 0; iNode < nodelist.getLength(); iNode++) {
// Get element
ArrayList valueslist;
// Get the name of the node
valueslist = selectElements(nodelist.item(iNode), "OUTCOME");
int nValues = valueslist.size();
// generate value strings
ArrayList nomStrings = new ArrayList(nValues + 1);
for (int iValue = 0; iValue < nValues; iValue++) {
Node node = valueslist.get(iValue).getFirstChild();
String sValue = ((CharacterData) (node)).getData();
if (sValue == null) {
// fall back to a generated value name
sValue = "Value" + (iValue + 1);
}
nomStrings.add(sValue);
}
ArrayList nodelist2;
// Get the name of the network
nodelist2 = selectElements(nodelist.item(iNode), "NAME");
if (nodelist2.size() == 0) {
throw new Exception("No name specified for variable");
}
String sBaseName = ((CharacterData) (nodelist2.get(0).getFirstChild()))
.getData();
sBaseNames.add(sBaseName);
// disambiguate the node name when it clashes with an existing node
String sNodeName = sBaseName;
if (getNode2(sNodeName) >= 0) {
sNodeName = "Copy of " + sBaseName;
}
int iAttempt = 2;
while (getNode2(sNodeName) >= 0) {
sNodeName = "Copy (" + iAttempt + ") of " + sBaseName;
iAttempt++;
}
Attribute att = new Attribute(sNodeName, nomStrings);
instances.insertAttributeAt(att, instances.numAttributes());
valueslist = selectElements(nodelist.item(iNode), "PROPERTY");
nValues = valueslist.size();
// generate value strings
// default position, offset by iAttempt so pasted copies do not overlap
int nPosX = iAttempt * 10;
int nPosY = iAttempt * 10;
for (int iValue = 0; iValue < nValues; iValue++) {
// parsing for strings of the form "position = (73, 165)"
Node node = valueslist.get(iValue).getFirstChild();
String sValue = ((CharacterData) (node)).getData();
if (sValue.startsWith("position")) {
int i0 = sValue.indexOf('(');
int i1 = sValue.indexOf(',');
int i2 = sValue.indexOf(')');
String sX = sValue.substring(i0 + 1, i1).trim();
String sY = sValue.substring(i1 + 1, i2).trim();
try {
nPosX = (Integer.parseInt(sX) + iAttempt * 10);
nPosY = (Integer.parseInt(sY) + iAttempt * 10);
} catch (NumberFormatException e) {
// keep the default position, just report the malformed property
System.err.println("Wrong number format in position :(" + sX + ","
+ sY + ")");
}
}
}
if (mode == EXECUTE) {
m_nPositionX.add(nPosX);
m_nPositionY.add(nPosY);
}
}
ArrayList nodelist2;
// grow the parent-set and distribution arrays to hold the pasted nodes
Estimator[][] distributions = new Estimator[nBase + sBaseNames.size()][];
ParentSet[] parentsets = new ParentSet[nBase + sBaseNames.size()];
for (int iNode = 0; iNode < nBase; iNode++) {
distributions[iNode] = m_Distributions[iNode];
parentsets[iNode] = m_ParentSets[iNode];
}
if (mode == EXECUTE) {
m_Instances = instances;
}
// create arrows & create distributions
for (int iNode = 0; iNode < sBaseNames.size(); iNode++) {
// find definition that goes with this node
String sName = sBaseNames.get(iNode);
Element definition = getDefinition(doc, sName);
parentsets[nBase + iNode] = new ParentSet();
// get the parents for this node
// resolve structure
nodelist2 = selectElements(definition, "GIVEN");
for (int iParent = 0; iParent < nodelist2.size(); iParent++) {
Node parentName = nodelist2.get(iParent).getFirstChild();
String sParentName = ((CharacterData) (parentName)).getData();
// a parent is either another pasted node or an already existing node
int nParent = -1;
for (int iBase = 0; iBase < sBaseNames.size(); iBase++) {
if (sParentName.equals(sBaseNames.get(iBase))) {
nParent = nBase + iBase;
}
}
if (nParent < 0) {
// throws when the parent exists in neither the paste nor the network
nParent = getNode(sParentName);
}
parentsets[nBase + iNode].addParent(nParent, instances);
}
// resolve conditional probability table
int nCardinality = parentsets[nBase + iNode].getCardinalityOfParents();
int nValues = instances.attribute(nBase + iNode).numValues();
distributions[nBase + iNode] = new Estimator[nCardinality];
for (int i = 0; i < nCardinality; i++) {
distributions[nBase + iNode][i] = new DiscreteEstimatorBayes(nValues,
0.0f);
}
// fill the CPT with whitespace-separated weights from the TABLE text
String sTable = getContent((Element) selectElements(definition, "TABLE")
.get(0));
sTable = sTable.replaceAll("\\n", " ");
StringTokenizer st = new StringTokenizer(sTable.toString());
for (int i = 0; i < nCardinality; i++) {
DiscreteEstimatorBayes d = (DiscreteEstimatorBayes) distributions[nBase
+ iNode][i];
for (int iValue = 0; iValue < nValues; iValue++) {
String sWeight = st.nextToken();
d.addValue(iValue, new Double(sWeight).doubleValue());
}
}
if (mode == EXECUTE) {
// new node: no evidence, all-zero marginal
m_nEvidence.add(nBase + iNode, -1);
m_fMarginP
.add(nBase + iNode, new double[getCardinality(nBase + iNode)]);
}
}
if (mode == EXECUTE) {
m_Distributions = distributions;
m_ParentSets = parentsets;
}
// update undo stack
if (mode == EXECUTE && m_bNeedsUndoAction) {
addUndoAction(new PasteAction(sXML, nBase));
}
} // paste
/**
 * Add arc between two nodes Distributions are updated by duplication for
 * every value of the parent node.
 *
 * @param sParent name of the parent node
 * @param sChild name of the child node
 * @throws Exception if parent or child cannot be found in network
 */
public void addArc(String sParent, String sChild) throws Exception {
  // resolve both names (parent first), then delegate to the index variant
  addArc(getNode(sParent), getNode(sChild));
} // addArc
/**
 * Add arc between two nodes Distributions are updated by duplication for
 * every value of the parent node.
 *
 * @param nParent index of the parent node
 * @param nChild index of the child node
 * @throws Exception
 */
public void addArc(int nParent, int nChild) throws Exception {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new AddArcAction(nParent, nChild));
  }
  final int nOldCard = m_ParentSets[nChild].getCardinalityOfParents();
  // update parentsets
  m_ParentSets[nChild].addParent(nParent, m_Instances);
  // update distributions: replicate the old table once per parent value
  final int nNewCard = m_ParentSets[nChild].getCardinalityOfParents();
  Estimator[] grown = new Estimator[nNewCard];
  for (int iConfig = 0; iConfig < nNewCard; iConfig++) {
    grown[iConfig] = Estimator
      .clone(m_Distributions[nChild][iConfig % nOldCard]);
  }
  m_Distributions[nChild] = grown;
} // addArc
/**
 * Add arc between parent node and each of the nodes in a given list.
 * Distributions are updated as above.
 *
 * @param sParent name of the parent node
 * @param nodes array of indexes of child nodes
 * @throws Exception
 */
public void addArc(String sParent, ArrayList<Integer> nodes) throws Exception {
  int nParent = getNode(sParent);
  // update undo stack: the whole batch is undone as a single action
  if (m_bNeedsUndoAction) {
    addUndoAction(new AddArcAction(nParent, nodes));
  }
  // suppress the per-arc undo actions while adding
  boolean bNeedsUndoAction = m_bNeedsUndoAction;
  m_bNeedsUndoAction = false;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    // raw-type get() did not compile when assigned to int; use the
    // parameterized list and unbox explicitly
    addArc(nParent, nodes.get(iNode).intValue());
  }
  m_bNeedsUndoAction = bNeedsUndoAction;
} // addArc
/**
 * Delete arc between two nodes. Distributions are updated by condensing for
 * the parent node taking its first value.
 *
 * @param sParent name of the parent node
 * @param sChild name of the child node
 * @throws Exception if parent or child cannot be found in network
 */
public void deleteArc(String sParent, String sChild) throws Exception {
  // resolve both names (parent first), then delegate to the index variant
  deleteArc(getNode(sParent), getNode(sChild));
} // deleteArc
/**
 * Delete arc between two nodes. Distributions are updated by condensing for
 * the parent node taking its first value.
 *
 * @param nParent index of the parent node
 * @param nChild index of the child node
 * @throws Exception
 */
public void deleteArc(int nParent, int nChild) throws Exception {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new DeleteArcAction(nParent, nChild));
  }
  // update distributions
  // condense distribution, use values for targetnode = 0: keep only the
  // leading block of parent configurations
  int nParentCard = m_ParentSets[nChild].getCardinalityOfParents();
  final int nTargetCard = m_Instances.attribute(nChild).numValues();
  nParentCard = nParentCard / nTargetCard;
  Estimator[] condensed = new Estimator[nParentCard];
  System.arraycopy(m_Distributions[nChild], 0, condensed, 0, nParentCard);
  m_Distributions[nChild] = condensed;
  // update parentsets
  m_ParentSets[nChild].deleteParent(nParent, m_Instances);
} // deleteArc
/**
 * specify distribution of a node
 *
 * @param sName name of the node to specify distribution for
 * @param P matrix representing distribution with P[i][j] = P(node = j |
 *          parent configuration = i)
 * @throws Exception if parent or child cannot be found in network
 */
public void setDistribution(String sName, double[][] P) throws Exception {
  // resolve the name, then delegate to the index-based variant
  setDistribution(getNode(sName), P);
} // setDistribution
/**
 * specify distribution of a node
 *
 * @param nTargetNode index of the node to specify distribution for
 * @param P matrix representing distribution with P[i][j] = P(node = j |
 *          parent configuration = i)
 * @throws Exception if parent or child cannot be found in network
 */
public void setDistribution(int nTargetNode, double[][] P) throws Exception {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new SetDistributionAction(nTargetNode, P));
  }
  // overwrite the estimators in place, one per parent configuration
  Estimator[] dists = m_Distributions[nTargetNode];
  for (int iConfig = 0; iConfig < dists.length; iConfig++) {
    DiscreteEstimatorBayes est = new DiscreteEstimatorBayes(P[0].length, 0);
    for (int iValue = 0; iValue < est.getNumSymbols(); iValue++) {
      est.addValue(iValue, P[iConfig][iValue]);
    }
    dists[iConfig] = est;
  }
} // setDistribution
/**
 * returns distribution of a node in matrix form with matrix representing
 * distribution with P[i][j] = P(node = j | parent configuration = i)
 *
 * @param sName name of the node to get distribution from
 * @return the distribution matrix of the node
 */
public double[][] getDistribution(String sName) {
  // resolve the name, then delegate to the index-based variant
  return getDistribution(getNode2(sName));
} // getDistribution
/**
 * returns distribution of a node in matrix form with matrix representing
 * distribution with P[i][j] = P(node = j | parent configuration = i)
 *
 * @param nTargetNode index of the node to get distribution from
 * @return the distribution matrix of the node
 */
public double[][] getDistribution(int nTargetNode) {
  final int nRows = m_ParentSets[nTargetNode].getCardinalityOfParents();
  final int nCols = m_Instances.attribute(nTargetNode).numValues();
  double[][] P = new double[nRows][nCols];
  for (int iRow = 0; iRow < nRows; iRow++) {
    Estimator est = m_Distributions[nTargetNode][iRow];
    for (int iCol = 0; iCol < nCols; iCol++) {
      P[iRow][iCol] = est.getProbability(iCol);
    }
  }
  return P;
} // getDistribution
/**
 * returns array of values of a node
 *
 * @param sName name of the node to get values from
 * @return the value names of the node
 */
public String[] getValues(String sName) {
  // resolve the name, then delegate to the index-based variant
  return getValues(getNode2(sName));
} // getValues
/**
 * returns array of values of a node
 *
 * @param nTargetNode index of the node to get values from
 * @return the value names of the node
 */
public String[] getValues(int nTargetNode) {
  // hoist the attribute lookup out of the loop
  Attribute att = m_Instances.attribute(nTargetNode);
  String[] values = new String[getCardinality(nTargetNode)];
  for (int i = 0; i < values.length; i++) {
    values[i] = att.value(i);
  }
  return values;
} // getValues
/**
 * returns value of a node
 *
 * @param nTargetNode index of the node to get values from
 * @param iValue index of the value
 * @return name of the value
 */
public String getValueName(int nTargetNode, int iValue) {
  Attribute att = m_Instances.attribute(nTargetNode);
  return att.value(iValue);
} // getNodeValue
/**
 * change the name of a node
 *
 * @param nTargetNode index of the node to set name for
 * @param sName new name to assign
 */
public void setNodeName(int nTargetNode, String sName) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new RenameAction(nTargetNode, getNodeName(nTargetNode),
      sName));
  }
  // rebuild the attribute under the new name, keeping its values unchanged
  Attribute att = m_Instances.attribute(nTargetNode);
  int nCardinality = att.numValues();
  // parameterized instead of raw ArrayList
  ArrayList<String> values = new ArrayList<String>(nCardinality);
  for (int iValue = 0; iValue < nCardinality; iValue++) {
    values.add(att.value(iValue));
  }
  replaceAtt(nTargetNode, sName, values);
} // setNodeName
/**
 * change the name of a value of a node
 *
 * @param nTargetNode index of the node to set name for
 * @param sValue current name of the value
 * @param sNewValue new name of the value
 */
public void renameNodeValue(int nTargetNode, String sValue, String sNewValue) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new RenameValueAction(nTargetNode, sValue, sNewValue));
  }
  // rebuild the value list, substituting the renamed value
  Attribute att = m_Instances.attribute(nTargetNode);
  int nCardinality = att.numValues();
  // parameterized instead of raw ArrayList
  ArrayList<String> values = new ArrayList<String>(nCardinality);
  for (int iValue = 0; iValue < nCardinality; iValue++) {
    if (att.value(iValue).equals(sValue)) {
      values.add(sNewValue);
    } else {
      values.add(att.value(iValue));
    }
  }
  replaceAtt(nTargetNode, att.name(), values);
} // renameNodeValue
/**
 * Add node value to a node. Distributions for the node assign zero
 * probability to the new value. Child nodes duplicate CPT conditioned on the
 * new value.
 *
 * @param nTargetNode index of the node to add value for
 * @param sNewValue name of the value
 */
public void addNodeValue(int nTargetNode, String sNewValue) {
// update undo stack
if (m_bNeedsUndoAction) {
addUndoAction(new AddValueAction(nTargetNode, sNewValue));
}
// rebuild the attribute with the extra value appended
Attribute att = m_Instances.attribute(nTargetNode);
int nCardinality = att.numValues();
ArrayList values = new ArrayList(nCardinality);
for (int iValue = 0; iValue < nCardinality; iValue++) {
values.add(att.value(iValue));
}
values.add(sNewValue);
replaceAtt(nTargetNode, att.name(), values);
// update distributions of this node: copy the old probabilities; the new
// value receives no count, i.e. probability zero
Estimator[] distributions = m_Distributions[nTargetNode];
int nNewCard = values.size();
for (int iParent = 0; iParent < distributions.length; iParent++) {
DiscreteEstimatorBayes distribution = new DiscreteEstimatorBayes(
nNewCard, 0);
for (int iValue = 0; iValue < nNewCard - 1; iValue++) {
distribution.addValue(iValue,
distributions[iParent].getProbability(iValue));
}
distributions[iParent] = distribution;
}
// update distributions of all children
for (int iNode = 0; iNode < getNrOfNodes(); iNode++) {
if (m_ParentSets[iNode].contains(nTargetNode)) {
distributions = m_Distributions[iNode];
ParentSet parentSet = m_ParentSets[iNode];
// parent-configuration count after the cardinality increase
int nParentCard = parentSet.getFreshCardinalityOfParents(m_Instances);
Estimator[] newDistributions = new Estimator[nParentCard];
int nCard = getCardinality(iNode);
int nParents = parentSet.getNrOfParents();
// values2 enumerates parent value configurations (incremented and
// carried over like an odometer in the loop below)
int[] values2 = new int[nParents];
int iOldPos = 0;
// position of nTargetNode within this child's parent list
int iTargetNode = 0;
while (parentSet.getParent(iTargetNode) != nTargetNode) {
iTargetNode++;
}
for (int iPos = 0; iPos < nParentCard; iPos++) {
DiscreteEstimatorBayes distribution = new DiscreteEstimatorBayes(
nCard, 0);
for (int iValue = 0; iValue < nCard; iValue++) {
distribution.addValue(iValue,
distributions[iOldPos].getProbability(iValue));
}
newDistributions[iPos] = distribution;
// update values
int i = 0;
values2[i]++;
while (i < nParents
&& values2[i] == getCardinality(parentSet.getParent(i))) {
values2[i] = 0;
i++;
if (i < nParents) {
values2[i]++;
}
}
// only advance the source column when the next configuration does not
// use the newly added value; columns for the new value duplicate the
// preceding one
if (values2[iTargetNode] != nNewCard - 1) {
iOldPos++;
}
}
m_Distributions[iNode] = newDistributions;
}
}
} // addNodeValue
/**
 * Delete a value from a node. Distributions for the node are scaled up
 * proportional to the existing distribution (or made uniform if zero
 * probability is assigned to the remainder of values). Child nodes drop the
 * CPT entries conditioned on the deleted value.
 *
 * @param nTargetNode index of the node to delete value from
 * @param sValue name of the value to delete
 * @throws Exception if the node does not have a value named sValue
 */
public void delNodeValue(int nTargetNode, String sValue) throws Exception {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new DelValueAction(nTargetNode, sValue));
  }
  Attribute att = m_Instances.attribute(nTargetNode);
  int nCardinality = att.numValues();
  // collect all values except the one being deleted, remembering its index
  ArrayList values = new ArrayList(nCardinality);
  int nValue = -1;
  for (int iValue = 0; iValue < nCardinality; iValue++) {
    if (att.value(iValue).equals(sValue)) {
      nValue = iValue;
    } else {
      values.add(att.value(iValue));
    }
  }
  if (nValue < 0) {
    // could not find value
    throw new Exception("Node " + nTargetNode + " does not have value ("
      + sValue + ")");
  }
  // swap in a new attribute with the reduced value set
  replaceAtt(nTargetNode, att.name(), values);
  // update distributions of the target node: keep the first nCard
  // probabilities of each old distribution and renormalize them, falling
  // back to a uniform distribution when the remaining mass is zero.
  // NOTE(review): the probabilities kept are always indexes 0..nCard-1, so
  // effectively the LAST value's mass is discarded regardless of which
  // value was deleted — this mirrors the scaling described in the javadoc
  // but looks suspicious when nValue != nCard; confirm against upstream.
  Estimator[] distributions = m_Distributions[nTargetNode];
  int nCard = values.size();
  for (int iParent = 0; iParent < distributions.length; iParent++) {
    DiscreteEstimatorBayes distribution = new DiscreteEstimatorBayes(nCard, 0);
    double sum = 0;
    for (int iValue = 0; iValue < nCard; iValue++) {
      sum += distributions[iParent].getProbability(iValue);
    }
    if (sum > 0) {
      // renormalize remaining probabilities so they sum to 1
      for (int iValue = 0; iValue < nCard; iValue++) {
        distribution.addValue(iValue,
          distributions[iParent].getProbability(iValue) / sum);
      }
    } else {
      // no mass left: make the distribution uniform
      for (int iValue = 0; iValue < nCard; iValue++) {
        distribution.addValue(iValue, 1.0 / nCard);
      }
    }
    distributions[iParent] = distribution;
  }
  // update distributions of all children: drop every CPT row that was
  // conditioned on the deleted value of the target parent
  for (int iNode = 0; iNode < getNrOfNodes(); iNode++) {
    if (m_ParentSets[iNode].contains(nTargetNode)) {
      ParentSet parentSet = m_ParentSets[iNode];
      distributions = m_Distributions[iNode];
      // new table is a fraction nCard / (nCard + 1) of the old size
      Estimator[] newDistributions = new Estimator[distributions.length
        * nCard / (nCard + 1)];
      int iCurrentDist = 0;
      int nParents = parentSet.getNrOfParents();
      // values2 enumerates parent-value configurations, odometer style
      int[] values2 = new int[nParents];
      // fill in the values
      // the attribute has already shrunk, so scale the fresh cardinality
      // back up to the OLD number of parent configurations
      int nParentCard = parentSet.getFreshCardinalityOfParents(m_Instances)
        * (nCard + 1) / nCard;
      // locate the target node among this child's parents
      int iTargetNode = 0;
      while (parentSet.getParent(iTargetNode) != nTargetNode) {
        iTargetNode++;
      }
      // per-parent cardinalities, with the target restored to its old size
      int[] nCards = new int[nParents];
      for (int iParent = 0; iParent < nParents; iParent++) {
        nCards[iParent] = getCardinality(parentSet.getParent(iParent));
      }
      nCards[iTargetNode]++;
      for (int iPos = 0; iPos < nParentCard; iPos++) {
        // keep only rows not conditioned on the deleted value
        if (values2[iTargetNode] != nValue) {
          newDistributions[iCurrentDist++] = distributions[iPos];
        }
        // update values: advance the odometer to the next configuration
        int i = 0;
        values2[i]++;
        while (i < nParents && values2[i] == nCards[i]) {
          values2[i] = 0;
          i++;
          if (i < nParents) {
            values2[i]++;
          }
        }
      }
      m_Distributions[iNode] = newDistributions;
    }
  }
  // update evidence: indexes above the deleted value shift down by one
  if (getEvidence(nTargetNode) > nValue) {
    setEvidence(nTargetNode, getEvidence(nTargetNode) - 1);
  }
} // delNodeValue
/**
 * Set position of a node. When the most recent undo action is a position
 * change for the same node, that action is updated in place instead of
 * pushing a new one, so dragging a node produces a single undo step.
 *
 * @param iNode index of node to set position for
 * @param nX x position of new position
 * @param nY y position of new position
 */
public void setPosition(int iNode, int nX, int nY) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    boolean isUpdate = false;
    UndoAction undoAction = null;
    try {
      if (m_undoStack.size() > 0) {
        undoAction = m_undoStack.get(m_undoStack.size() - 1);
        // throws ClassCastException (caught below) when the top of the
        // stack is not a position action
        SetPositionAction posAction = (SetPositionAction) undoAction;
        if (posAction.m_nTargetNode == iNode) {
          isUpdate = true;
          posAction.setUndoPosition(nX, nY);
        }
      }
    } catch (Exception e) {
      // ignore. it's not a SetPositionAction
    }
    if (!isUpdate) {
      addUndoAction(new SetPositionAction(iNode, nX, nY));
    }
  }
  // BUG FIX: overwrite the node's coordinates with set(); the previous
  // add(index, element) INSERTED entries, shifting the positions of all
  // subsequent nodes out of alignment (the group variant below uses set()).
  m_nPositionX.set(iNode, nX);
  m_nPositionY.set(iNode, nY);
} // setPosition
/**
 * Set position of node. Move a set of nodes with the same displacement as a
 * specified node. When the most recent undo action is a group move of the
 * exact same node list, that action is updated in place instead of pushing
 * a new one.
 *
 * @param nNode index of node to set position for
 * @param nX x position of new position
 * @param nY y position of new position
 * @param nodes array of indexes of nodes to move
 */
public void setPosition(int nNode, int nX, int nY, ArrayList<Integer> nodes) {
  // displacement relative to the reference node's current position
  int dX = nX - getPositionX(nNode);
  int dY = nY - getPositionY(nNode);
  // update undo stack
  if (m_bNeedsUndoAction) {
    boolean isUpdate = false;
    try {
      UndoAction undoAction = null;
      if (m_undoStack.size() > 0) {
        undoAction = m_undoStack.get(m_undoStack.size() - 1);
        // throws ClassCastException (caught below) when the top of the
        // stack is not a group-move action
        SetGroupPositionAction posAction = (SetGroupPositionAction) undoAction;
        // only merge when the previous action moved the same node list
        isUpdate = true;
        int iNode = 0;
        while (isUpdate && iNode < posAction.m_nodes.size()) {
          // NOTE(review): reference comparison of Integer objects; relies
          // on the same Integer instances being stored in both lists —
          // confirm this is intended (equals() would compare by value)
          if (posAction.m_nodes.get(iNode) != nodes.get(iNode)) {
            isUpdate = false;
          }
          iNode++;
        }
        if (isUpdate == true) {
          posAction.setUndoPosition(dX, dY);
        }
      }
    } catch (Exception e) {
      // ignore. it's not a SetGroupPositionAction
    }
    if (!isUpdate) {
      addUndoAction(new SetGroupPositionAction(nodes, dX, dY));
    }
  }
  // apply the displacement to every node in the set
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    nNode = nodes.get(iNode);
    m_nPositionX.set(nNode, getPositionX(nNode) + dX);
    m_nPositionY.set(nNode, getPositionY(nNode) + dY);
  }
} // setPosition
/**
 * Set positions of all nodes at once, recording the previous layout on the
 * undo stack.
 *
 * @param nPosX new x positions for all nodes
 * @param nPosY new y positions for all nodes
 */
public void layoutGraph(ArrayList<Integer> nPosX, ArrayList<Integer> nPosY) {
  // record the layout change so it can be undone
  if (m_bNeedsUndoAction) {
    addUndoAction(new LayoutGraphAction(nPosX, nPosY));
  }
  m_nPositionX = nPosX;
  m_nPositionY = nPosY;
} // layoutGraph
/**
 * Get the x coordinate of a node.
 *
 * @param iNode index of node of interest
 * @return x position of the node
 */
public int getPositionX(int iNode) {
  return m_nPositionX.get(iNode);
}
/**
 * Get the y coordinate of a node.
 *
 * @param iNode index of node of interest
 * @return y position of the node
 */
public int getPositionY(int iNode) {
  return m_nPositionY.get(iNode);
}
/**
 * Align a set of nodes with the left-most node in the list: every node is
 * moved to the smallest x coordinate found in the set.
 *
 * @param nodes list of indexes of nodes to align
 */
public void alignLeft(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new alignLeftAction(nodes));
  }
  // find the left-most (smallest) x position among the selected nodes
  int nMinX = Integer.MAX_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nX = getPositionX(nodes.get(iNode));
    if (nX < nMinX) {
      nMinX = nX;
    }
  }
  // move every node in the set to that x position
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionX.set(nodes.get(iNode), nMinX);
  }
} // alignLeft
/**
 * Align a set of nodes with the right-most node in the list: every node is
 * moved to the largest x coordinate found in the set.
 *
 * @param nodes list of indexes of nodes to align
 */
public void alignRight(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new alignRightAction(nodes));
  }
  // find the right-most (largest) x position among the selected nodes
  int nMaxX = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nX = getPositionX(nodes.get(iNode));
    if (nX > nMaxX) {
      nMaxX = nX;
    }
  }
  // move every node in the set to that x position
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionX.set(nodes.get(iNode), nMaxX);
  }
} // alignRight
/**
 * Align a set of nodes with the top-most node in the list: every node is
 * moved to the smallest y coordinate found in the set.
 *
 * @param nodes list of indexes of nodes to align
 */
public void alignTop(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new alignTopAction(nodes));
  }
  // find the top-most (smallest) y position among the selected nodes
  int nMinY = Integer.MAX_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nY = getPositionY(nodes.get(iNode));
    if (nY < nMinY) {
      nMinY = nY;
    }
  }
  // move every node in the set to that y position
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionY.set(nodes.get(iNode), nMinY);
  }
} // alignTop
/**
 * Align a set of nodes with the bottom-most node in the list: every node is
 * moved to the largest y coordinate found in the set.
 *
 * @param nodes list of indexes of nodes to align
 */
public void alignBottom(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new alignBottomAction(nodes));
  }
  // find the bottom-most (largest) y position among the selected nodes
  int nMaxY = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nY = getPositionY(nodes.get(iNode));
    if (nY > nMaxY) {
      nMaxY = nY;
    }
  }
  // move every node in the set to that y position
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionY.set(nodes.get(iNode), nMaxY);
  }
} // alignBottom
/**
 * Center a set of nodes on a horizontal line half way between the top- and
 * bottom-most nodes in the list (only y coordinates change).
 *
 * @param nodes list of indexes of nodes to center
 */
public void centerHorizontal(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new centerHorizontalAction(nodes));
  }
  // find the vertical extremes of the selection
  int nMinY = Integer.MAX_VALUE;
  int nMaxY = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nY = getPositionY(nodes.get(iNode));
    nMinY = Math.min(nMinY, nY);
    nMaxY = Math.max(nMaxY, nY);
  }
  // place every node on the midpoint between the extremes
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionY.set(nodes.get(iNode), (nMinY + nMaxY) / 2);
  }
} // centerHorizontal
/**
 * Center a set of nodes on a vertical line half way between the left- and
 * right-most nodes in the list (only x coordinates change).
 *
 * @param nodes list of indexes of nodes to center
 */
public void centerVertical(ArrayList<Integer> nodes) {
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new centerVerticalAction(nodes));
  }
  // find the horizontal extremes of the selection
  int nMinX = Integer.MAX_VALUE;
  int nMaxX = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nX = getPositionX(nodes.get(iNode));
    nMinX = Math.min(nMinX, nX);
    nMaxX = Math.max(nMaxX, nX);
  }
  // place every node on the midpoint between the extremes
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionX.set(nodes.get(iNode), (nMinX + nMaxX) / 2);
  }
} // centerVertical
/**
 * Space out a set of nodes evenly between the left- and right-most nodes in
 * the list, in list order (only x coordinates change).
 *
 * @param nodes list of indexes of nodes to space out
 */
public void spaceHorizontal(ArrayList<Integer> nodes) {
  // Fewer than two nodes: spacing is undefined. This also guards the
  // interpolation below, whose division by (size - 1) previously produced
  // NaN for a single node and silently moved it to x == 0.
  if (nodes.size() < 2) {
    return;
  }
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new spaceHorizontalAction(nodes));
  }
  // find the horizontal extremes of the selection
  int nMinX = Integer.MAX_VALUE;
  int nMaxX = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nX = getPositionX(nodes.get(iNode));
    nMinX = Math.min(nMinX, nX);
    nMaxX = Math.max(nMaxX, nX);
  }
  // distribute nodes at equal intervals between the extremes
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionX.set(nodes.get(iNode),
      (int) (nMinX + iNode * (nMaxX - nMinX) / (nodes.size() - 1.0)));
  }
} // spaceHorizontal
/**
 * Space out a set of nodes evenly between the top- and bottom-most nodes in
 * the list, in list order (only y coordinates change).
 *
 * @param nodes list of indexes of nodes to space out
 */
public void spaceVertical(ArrayList<Integer> nodes) {
  // Fewer than two nodes: spacing is undefined. This also guards the
  // interpolation below, whose division by (size - 1) previously produced
  // NaN for a single node and silently moved it to y == 0.
  if (nodes.size() < 2) {
    return;
  }
  // update undo stack
  if (m_bNeedsUndoAction) {
    addUndoAction(new spaceVerticalAction(nodes));
  }
  // find the vertical extremes of the selection
  int nMinY = Integer.MAX_VALUE;
  int nMaxY = Integer.MIN_VALUE;
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    int nY = getPositionY(nodes.get(iNode));
    nMinY = Math.min(nMinY, nY);
    nMaxY = Math.max(nMaxY, nY);
  }
  // distribute nodes at equal intervals between the extremes
  for (int iNode = 0; iNode < nodes.size(); iNode++) {
    m_nPositionY.set(nodes.get(iNode),
      (int) (nMinY + iNode * (nMaxY - nMinY) / (nodes.size() - 1.0)));
  }
} // spaceVertical
/**
 * Replace the attribute of a node with one with the specified name and
 * values. The class index is preserved when the target node is the class
 * attribute.
 *
 * @param nTargetNode index of node to replace the specification for
 * @param sName new name of the node
 * @param values list of values of the node
 */
void replaceAtt(int nTargetNode, String sName, ArrayList<String> values) {
  Attribute newAtt = new Attribute(sName, values);
  boolean bIsClass = (m_Instances.classIndex() == nTargetNode);
  if (bIsClass) {
    // temporarily clear the class index: the class attribute cannot be
    // deleted while it is marked as the class
    m_Instances.setClassIndex(-1);
  }
  // both branches of the original code performed this same delete/insert
  m_Instances.deleteAttributeAt(nTargetNode);
  m_Instances.insertAttributeAt(newAtt, nTargetNode);
  if (bIsClass) {
    m_Instances.setClassIndex(nTargetNode);
  }
} // replaceAtt
/**
 * Return the marginal distribution for a node.
 *
 * @param iNode index of node of interest
 * @return array of marginal probabilities, one per value of the node
 */
public double[] getMargin(int iNode) {
  return m_fMarginP.get(iNode);
} // getMargin
/**
 * Set the marginal distribution for a node.
 *
 * @param iNode index of node to set marginal distribution for
 * @param fMarginP marginal distribution, one probability per value
 */
public void setMargin(int iNode, double[] fMarginP) {
  m_fMarginP.set(iNode, fMarginP);
} // setMargin
/**
 * Get the evidence state of a node: -1 represents no evidence set,
 * otherwise the index of a value of the node.
 *
 * @param iNode index of node of interest
 * @return evidence value index, or -1 when no evidence is set
 */
public int getEvidence(int iNode) {
  return m_nEvidence.get(iNode);
} // getEvidence
/**
 * Set the evidence state of a node: -1 represents no evidence set,
 * otherwise the index of a value of the node.
 *
 * @param iNode index of node of interest
 * @param iValue evidence value to set
 */
public void setEvidence(int iNode, int iValue) {
  m_nEvidence.set(iNode, iValue);
} // setEvidence
/**
 * Return the list of children of a node, i.e. every node whose parent set
 * contains the target node.
 *
 * @param nTargetNode index of node of interest
 * @return list of indexes of child nodes
 */
public ArrayList<Integer> getChildren(int nTargetNode) {
  ArrayList<Integer> children = new ArrayList<Integer>();
  int nNodes = getNrOfNodes();
  for (int iNode = 0; iNode < nNodes; iNode++) {
    if (m_ParentSets[iNode].contains(nTargetNode)) {
      children.add(iNode);
    }
  }
  return children;
} // getChildren
/**
* returns network in XMLBIF format
*/
@Override
public String toXMLBIF03() {
if (m_Instances == null) {
return ("");
}
StringBuffer text = new StringBuffer();
text.append(getBIFHeader());
text.append("\n");
text.append("\n");
text.append("\n");
text.append("\n");
text.append("" + XMLNormalize(m_Instances.relationName())
+ "\n");
for (int iAttribute = 0; iAttribute < m_Instances.numAttributes(); iAttribute++) {
text.append("\n");
text.append(""
+ XMLNormalize(m_Instances.attribute(iAttribute).name()) + "\n");
for (int iValue = 0; iValue < m_Instances.attribute(iAttribute)
.numValues(); iValue++) {
text.append(""
+ XMLNormalize(m_Instances.attribute(iAttribute).value(iValue))
+ "\n");
}
text.append("position = (" + getPositionX(iAttribute) + ","
+ getPositionY(iAttribute) + ")\n");
text.append("\n");
}
for (int iAttribute = 0; iAttribute < m_Instances.numAttributes(); iAttribute++) {
text.append("\n");
text.append(""
+ XMLNormalize(m_Instances.attribute(iAttribute).name()) + "\n");
for (int iParent = 0; iParent < m_ParentSets[iAttribute].getNrOfParents(); iParent++) {
text
.append(""
+ XMLNormalize(m_Instances.attribute(
m_ParentSets[iAttribute].getParent(iParent)).name())
+ "\n");
}
text.append("