weka.classifiers.lazy.KStar Maven / Gradle / Ivy
The Waikato Environment for Knowledge Analysis (WEKA), a machine
learning workbench. This is the stable version. Apart from bugfixes, this version
does not receive any other updates.
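The listing below is the source of the weka.classifiers.lazy.KStar classifier. As a minimal usage sketch (not part of the listing itself), the class can be trained and queried through the standard WEKA classifier API; the class name KStarUsageSketch and the ARFF file name "iris.arff" are placeholders for illustration only:

import java.io.BufferedReader;
import java.io.FileReader;
import weka.classifiers.lazy.KStar;
import weka.core.Instances;

public class KStarUsageSketch {
  public static void main(String[] args) throws Exception {
    // Load a dataset in ARFF format; the file name is a placeholder.
    Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
    // By WEKA convention, use the last attribute as the class attribute.
    data.setClassIndex(data.numAttributes() - 1);

    // Build K* with its default settings (20% global blend, averaged missing values).
    KStar ks = new KStar();
    ks.buildClassifier(data);

    // K* is an UpdateableClassifier, so further labelled instances can be added
    // incrementally (here a training instance is simply re-added for illustration).
    ks.updateClassifier(data.instance(data.numInstances() - 1));

    // Predict the class distribution for the first instance.
    double[] dist = ks.distributionForInstance(data.instance(0));
    for (int i = 0; i < dist.length; i++) {
      System.out.println(data.classAttribute().value(i) + ": " + dist[i]);
    }
  }
}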
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* KStar.java
* Copyright (C) 1995-97 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.lazy;
import weka.classifiers.Classifier;
import weka.classifiers.UpdateableClassifier;
import weka.classifiers.lazy.kstar.KStarCache;
import weka.classifiers.lazy.kstar.KStarConstants;
import weka.classifiers.lazy.kstar.KStarNominalAttribute;
import weka.classifiers.lazy.kstar.KStarNumericAttribute;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
/**
* K* is an instance-based classifier, that is the class of a test instance is based upon the class of those training instances similar to it, as determined by some similarity function. It differs from other instance-based learners in that it uses an entropy-based distance function.
*
* For more information on K*, see
*
* John G. Cleary, Leonard E. Trigg: K*: An Instance-based Learner Using an Entropic Distance Measure. In: 12th International Conference on Machine Learning, 108-114, 1995.
*
*
* BibTeX:
*
* @inproceedings{Cleary1995,
* author = {John G. Cleary and Leonard E. Trigg},
* booktitle = {12th International Conference on Machine Learning},
* pages = {108-114},
* title = {K*: An Instance-based Learner Using an Entropic Distance Measure},
* year = {1995}
* }
*
*
*
* Valid options are:
*
* -B <num>
* Manual blend setting (default 20%)
*
*
* -E
* Enable entropic auto-blend setting (symbolic class only)
*
*
* -M <char>
* Specify the missing value treatment mode (default a)
* Valid options are: a(verage), d(elete), m(axdiff), n(ormal)
*
*
*
* @author Len Trigg ([email protected])
* @author Abdelaziz Mahoui ([email protected]) - Java port
* @version $Revision: 5525 $
*/
public class KStar
extends Classifier
implements KStarConstants, UpdateableClassifier, TechnicalInformationHandler {
/** for serialization */
static final long serialVersionUID = 332458330800479083L;
/** The training instances used for classification. */
protected Instances m_Train;
/** The number of instances in the dataset */
protected int m_NumInstances;
/** The number of class values */
protected int m_NumClasses;
/** The number of attributes */
protected int m_NumAttributes;
/** The class attribute type */
protected int m_ClassType;
/** Table of random class value columns */
protected int [][] m_RandClassCols;
/** Flag turning on and off the computation of random class columns */
protected int m_ComputeRandomCols = ON;
/** Flag turning on and off the initialisation of config variables */
protected int m_InitFlag = ON;
/**
* A custom data structure for caching distinct attribute values
* and their scale factor or stop parameter.
*/
protected KStarCache [] m_Cache;
/** missing value treatment */
protected int m_MissingMode = M_AVERAGE;
/** 0 = use specified blend, 1 = entropic blend setting */
protected int m_BlendMethod = B_SPHERE;
/** default sphere of influence blend setting */
protected int m_GlobalBlend = 20;
/** Define possible missing value handling methods */
public static final Tag [] TAGS_MISSING = {
new Tag(M_DELETE, "Ignore the instances with missing values"),
new Tag(M_MAXDIFF, "Treat missing values as maximally different"),
new Tag(M_NORMAL, "Normalize over the attributes"),
new Tag(M_AVERAGE, "Average column entropy curves")
};
/**
* Returns a string describing classifier
* @return a description suitable for
* displaying in the explorer/experimenter gui
*/
public String globalInfo() {
return "K* is an instance-based classifier, that is the class of a test "
+ "instance is based upon the class of those training instances "
+ "similar to it, as determined by some similarity function. It differs "
+ "from other instance-based learners in that it uses an entropy-based "
+ "distance function.\n\n"
+ "For more information on K*, see\n\n"
+ getTechnicalInformation().toString();
}
/**
* Returns an instance of a TechnicalInformation object, containing
* detailed information about the technical background of this class,
* e.g., paper reference or book this class is based on.
*
* @return the technical information about this class
*/
public TechnicalInformation getTechnicalInformation() {
TechnicalInformation result;
result = new TechnicalInformation(Type.INPROCEEDINGS);
result.setValue(Field.AUTHOR, "John G. Cleary and Leonard E. Trigg");
result.setValue(Field.TITLE, "K*: An Instance-based Learner Using an Entropic Distance Measure");
result.setValue(Field.BOOKTITLE, "12th International Conference on Machine Learning");
result.setValue(Field.YEAR, "1995");
result.setValue(Field.PAGES, "108-114");
return result;
}
/**
* Returns default capabilities of the classifier.
*
* @return the capabilities of this classifier
*/
public Capabilities getCapabilities() {
Capabilities result = super.getCapabilities();
result.disableAll();
// attributes
result.enable(Capability.NOMINAL_ATTRIBUTES);
result.enable(Capability.NUMERIC_ATTRIBUTES);
result.enable(Capability.DATE_ATTRIBUTES);
result.enable(Capability.MISSING_VALUES);
// class
result.enable(Capability.NOMINAL_CLASS);
result.enable(Capability.NUMERIC_CLASS);
result.enable(Capability.DATE_CLASS);
result.enable(Capability.MISSING_CLASS_VALUES);
// instances
result.setMinimumNumberInstances(0);
return result;
}
/**
* Generates the classifier.
*
* @param instances set of instances serving as training data
* @throws Exception if the classifier has not been generated successfully
*/
public void buildClassifier(Instances instances) throws Exception {
String debug = "(KStar.buildClassifier) ";
// can classifier handle the data?
getCapabilities().testWithFail(instances);
// remove instances with missing class
instances = new Instances(instances);
instances.deleteWithMissingClass();
m_Train = new Instances(instances, 0, instances.numInstances());
// initializes class attributes ** java-speaking! :-) **
init_m_Attributes();
}
/**
* Adds the supplied instance to the training set
*
* @param instance the instance to add
* @throws Exception if instance could not be incorporated successfully
*/
public void updateClassifier(Instance instance) throws Exception {
String debug = "(KStar.updateClassifier) ";
if (m_Train.equalHeaders(instance.dataset()) == false)
throw new Exception("Incompatible instance types");
if ( instance.classIsMissing() )
return;
m_Train.add(instance);
// update relevant attributes ...
update_m_Attributes();
}
/**
* Calculates the class membership probabilities for the given test instance.
*
* @param instance the instance to be classified
* @return predicted class probability distribution
* @throws Exception if an error occurred during the prediction
*/
public double [] distributionForInstance(Instance instance) throws Exception {
String debug = "(KStar.distributionForInstance) ";
double transProb = 0.0, temp = 0.0;
double [] classProbability = new double[m_NumClasses];
double [] predictedValue = new double[1];
// initialization ...
for (int i = 0; i < classProbability.length; i++) {
classProbability[i] = 0.0;
}
predictedValue[0] = 0.0;
// ... (the remainder of distributionForInstance and the private
// transformation-probability helpers are not reproduced in this listing) ...
}
/**
* Gets the method used for handling missing values.
*
* @return the missing value treatment mode as a SelectedTag
*/
public SelectedTag getMissingMode() {
return new SelectedTag(m_MissingMode, TAGS_MISSING);
}
/**
* Sets the method used for handling missing values. Tags other than
* those in TAGS_MISSING are ignored.
*
* @param newMode the missing value treatment mode
*/
public void setMissingMode(SelectedTag newMode) {
if (newMode.getTags() == TAGS_MISSING) {
m_MissingMode = newMode.getSelectedTag().getID();
}
}
/**
* Returns an enumeration describing the available options.
*
* @return an enumeration of all the available options
*/
public Enumeration listOptions() {
Vector optVector = new Vector( 3 );
optVector.addElement(new Option(
"\tManual blend setting (default 20%)\n",
"B", 1, "-B <num>"));
optVector.addElement(new Option(
"\tEnable entropic auto-blend setting (symbolic class only)\n",
"E", 0, "-E"));
optVector.addElement(new Option(
"\tSpecify the missing value treatment mode (default a)\n"
+"\tValid options are: a(verage), d(elete), m(axdiff), n(ormal)\n",
"M", 1,"-M "));
return optVector.elements();
}
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String globalBlendTipText() {
return "The parameter for global blending. Values are restricted to [0,100].";
}
/**
* Set the global blend parameter
* @param b the value for global blending
*/
public void setGlobalBlend(int b) {
m_GlobalBlend = b;
if ( m_GlobalBlend > 100 ) {
m_GlobalBlend = 100;
}
if ( m_GlobalBlend < 0 ) {
m_GlobalBlend = 0;
}
}
/**
* Get the value of the global blend parameter
* @return the value of the global blend parameter
*/
public int getGlobalBlend() {
return m_GlobalBlend;
}
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String entropicAutoBlendTipText() {
return "Whether entropy-based blending is to be used.";
}
/**
* Set whether entropic blending is to be used.
* @param e true if entropic blending is to be used
*/
public void setEntropicAutoBlend(boolean e) {
if (e) {
m_BlendMethod = B_ENTROPY;
} else {
m_BlendMethod = B_SPHERE;
}
}
/**
* Get whether entropic blending being used
* @return true if entropic blending is used
*/
public boolean getEntropicAutoBlend() {
if (m_BlendMethod == B_ENTROPY) {
return true;
}
return false;
}
/**
* Parses a given list of options.
*
* Valid options are:
*
* -B <num>
* Manual blend setting (default 20%)
*
*
* -E
* Enable entropic auto-blend setting (symbolic class only)
*
*
* -M <char>
* Specify the missing value treatment mode (default a)
* Valid options are: a(verage), d(elete), m(axdiff), n(ormal)
*
*
*
* @param options the list of options as an array of strings
* @throws Exception if an option is not supported
*/
public void setOptions(String[] options) throws Exception {
String debug = "(KStar.setOptions)";
String blendStr = Utils.getOption('B', options);
if (blendStr.length() != 0) {
setGlobalBlend(Integer.parseInt(blendStr));
}
setEntropicAutoBlend(Utils.getFlag('E', options));
String missingModeStr = Utils.getOption('M', options);
if (missingModeStr.length() != 0) {
switch ( missingModeStr.charAt(0) ) {
case 'a':
setMissingMode(new SelectedTag(M_AVERAGE, TAGS_MISSING));
break;
case 'd':
setMissingMode(new SelectedTag(M_DELETE, TAGS_MISSING));
break;
case 'm':
setMissingMode(new SelectedTag(M_MAXDIFF, TAGS_MISSING));
break;
case 'n':
setMissingMode(new SelectedTag(M_NORMAL, TAGS_MISSING));
break;
default:
setMissingMode(new SelectedTag(M_AVERAGE, TAGS_MISSING));
}
}
Utils.checkForRemainingOptions(options);
}
/**
* Gets the current settings of K*.
*
* @return an array of strings suitable for passing to setOptions()
*/
public String [] getOptions() {
// -B -E -M
String [] options = new String [ 5 ];
int itr = 0;
options[itr++] = "-B";
options[itr++] = "" + m_GlobalBlend;
if (getEntropicAutoBlend()) {
options[itr++] = "-E";
}
options[itr++] = "-M";
if (m_MissingMode == M_AVERAGE) {
options[itr++] = "" + "a";
}
else if (m_MissingMode == M_DELETE) {
options[itr++] = "" + "d";
}
else if (m_MissingMode == M_MAXDIFF) {
options[itr++] = "" + "m";
}
else if (m_MissingMode == M_NORMAL) {
options[itr++] = "" + "n";
}
while (itr < options.length) {
options[itr++] = "";
}
return options;
}
/**
* Returns a description of this classifier.
*
* @return a description of this classifier as a string.
*/
public String toString() {
StringBuffer st = new StringBuffer();
st.append("KStar Beta Verion (0.1b).\n"
+"Copyright (c) 1995-97 by Len Trigg ([email protected]).\n"
+"Java port to Weka by Abdelaziz Mahoui "
+"([email protected]).\n\nKStar options : ");
String [] ops = getOptions();
for (int i = 0; i < ops.length; i++) {
st.append(ops[i] + ' ');
}
return st.toString();
}
// ... (the main method and the private helpers init_m_Attributes,
// update_m_Attributes and the random class column generator are not
// reproduced in this listing) ...
/**
* Returns a copy of the array with its elements randomly redistributed.
*
* @param array the array to randomize
* @param generator the random number generator to use
* @return a shuffled copy of the array
*/
private int [] randomize(int [] array, Random generator) {
int index, temp;
int [] newArray = new int[array.length];
System.arraycopy(array, 0, newArray, 0, array.length);
for (int j = newArray.length - 1; j > 0; j--) {
index = (int) ( generator.nextDouble() * (double)j );
temp = newArray[j];
newArray[j] = newArray[index];
newArray[index] = temp;
}
return newArray;
}
/**
* Returns the revision string.
*
* @return the revision
*/
public String getRevision() {
return RevisionUtils.extract("$Revision: 5525 $");
}
} // class end
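The -B, -E and -M options handled by setOptions()/getOptions() above map directly onto the bean-style setters of the class. The following configuration sketch is not part of the original file; the class name KStarOptionsSketch is illustrative:

import weka.classifiers.lazy.KStar;
import weka.core.SelectedTag;

public class KStarOptionsSketch {
  public static void main(String[] args) throws Exception {
    // Configure via the setters: 30% global blend, entropic auto-blend
    // (nominal class only), missing values treated as maximally different.
    KStar viaSetters = new KStar();
    viaSetters.setGlobalBlend(30);                                   // -B 30
    viaSetters.setEntropicAutoBlend(true);                           // -E
    viaSetters.setMissingMode(
        new SelectedTag(KStar.M_MAXDIFF, KStar.TAGS_MISSING));       // -M m

    // The same configuration expressed as a command-line style option array.
    KStar viaOptions = new KStar();
    viaOptions.setOptions(new String[] {"-B", "30", "-E", "-M", "m"});

    // The getters report the current settings.
    System.out.println("blend = " + viaOptions.getGlobalBlend()
        + ", entropic auto-blend = " + viaOptions.getEntropicAutoBlend());
  }
}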