weka.classifiers.trees.j48.EntropySplitCrit
The Waikato Environment for Knowledge Analysis (WEKA), a machine
learning workbench. This version represents the developer version, the
"bleeding edge" of development, you could say. New functionality gets added
to this version.
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * EntropySplitCrit.java
 * Copyright (C) 1999-2012 University of Waikato, Hamilton, New Zealand
 *
 */
package weka.classifiers.trees.j48;
import weka.core.RevisionUtils;
import weka.core.Utils;
import weka.core.ContingencyTables;
/**
 * Class for computing the entropy for a given distribution.
 *
 * @author Eibe Frank ([email protected])
 * @version $Revision: 10055 $
 */
public final class EntropySplitCrit
  extends EntropyBasedSplitCrit {

  /** for serialization */
  private static final long serialVersionUID = 5986252682266803935L;

  /**
   * Computes the entropy for the given distribution, i.e. the entropy of the
   * class distribution after the split (delegating to the inherited newEnt).
   *
   * @param bags the distribution to evaluate
   * @return the entropy of the given distribution
   */
  public final double splitCritValue(Distribution bags) {

    return newEnt(bags);
  }
  /**
   * Computes the entropy of the test distribution with respect to the
   * training distribution.
   *
   * @param train the training distribution
   * @param test the test distribution
   * @return the entropy of the test data with respect to the training data, in bits
   */
  public final double splitCritValue(Distribution train, Distribution test) {

    double result = 0;
    int numClasses = 0;
    int i, j;

    // Find out the relevant number of classes: those present in either
    // the training or the test distribution
    for (j = 0; j < test.numClasses(); j++) {
      if (Utils.gr(train.perClass(j), 0) || Utils.gr(test.perClass(j), 0)) {
        numClasses++;
      }
    }

    // Compute entropy of test data with respect to training data. The "+ 1"
    // and "+ numClasses" apply add-one (Laplace) smoothing to the training
    // counts, so that classes unseen in a training bag do not produce log(0).
    for (i = 0; i < test.numBags(); i++) {
      if (Utils.gr(test.perBag(i), 0)) {
        for (j = 0; j < test.numClasses(); j++) {
          if (Utils.gr(test.perClassPerBag(i, j), 0)) {
            result -= test.perClassPerBag(i, j)
              * Math.log(train.perClassPerBag(i, j) + 1);
          }
        }
        result += test.perBag(i) * Math.log(train.perBag(i) + numClasses);
      }
    }

    // Convert from natural logarithm to bits
    return result / ContingencyTables.log2;
  }
  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 10055 $");
  }
}
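
For orientation, below is a minimal, self-contained sketch (not part of Weka) of the quantity that splitCritValue(Distribution bags) delegates to through the inherited newEnt helper: the count-weighted entropy of the class distribution across the bags of a candidate split. The EntropySketch class name, the counts array layout, and the example values are assumptions made purely for illustration.

// Hypothetical illustration, not part of Weka. It mirrors the count-weighted
// entropy that EntropyBasedSplitCrit.newEnt(Distribution) computes, assuming
// counts[i][j] holds the number of training instances of class j that fall
// into branch (bag) i of a candidate split.
public final class EntropySketch {

  /** Entropy contribution of one bag: sum over classes of n_ij * log2(n_i / n_ij). */
  private static double bagEntropy(double[] classCounts) {
    double bagTotal = 0;
    for (double c : classCounts) {
      bagTotal += c;
    }
    double ent = 0;
    for (double c : classCounts) {
      if (c > 0) {
        ent += c * (Math.log(bagTotal) - Math.log(c)) / Math.log(2);
      }
    }
    return ent;
  }

  /** Count-weighted entropy of the class distribution after the split. */
  public static double splitEntropy(double[][] counts) {
    double result = 0;
    for (double[] bag : counts) {
      result += bagEntropy(bag);
    }
    return result;
  }

  public static void main(String[] args) {
    // Two branches: the first is pure, the second is an even 3/3 class mix.
    double[][] counts = { { 4, 0 }, { 3, 3 } };
    System.out.println(splitEntropy(counts));
  }
}

Running main prints 6.0: the pure first bag contributes nothing, while the evenly mixed second bag contributes one bit per instance over six instances. Weka leaves this quantity count-weighted rather than dividing by the total number of instances.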