/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TLD.java
* Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.mi;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
import weka.classifiers.RandomizableClassifier;
import weka.core.Capabilities;
import weka.core.Capabilities.Capability;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.MultiInstanceCapabilitiesHandler;
import weka.core.Optimization;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
/**
 * Two-Level Distribution approach: changes the starting value of the search
 * algorithm, supplements the cut-off modification, and checks missing values.
*
* For more information see:
*
* Xin Xu (2003). Statistical learning in multiple instance problem. Hamilton,
* NZ.
 *
 * BibTeX:
 *
* @mastersthesis{Xu2003,
* address = {Hamilton, NZ},
* author = {Xin Xu},
* note = {0657.594},
* school = {University of Waikato},
* title = {Statistical learning in multiple instance problem},
* year = {2003}
* }
 *
 * Valid options are:
 *
 * -C
 *  Set whether or not to use the empirical
 *  log-odds cut-off instead of 0
 *
 * -R <numOfRuns>
 *  Set the number of multiple runs
 *  needed for searching the MLE.
 *
 * -S <num>
 *  Random number seed.
 *  (default 1)
*
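 * Example command line (a sketch; "mi-data.arff" is a hypothetical dataset
 * name, used only to illustrate the option syntax):
 *
 * java weka.classifiers.mi.TLD -t mi-data.arff -C -R 10 -S 1
 *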
* @author Eibe Frank ([email protected])
* @author Xin Xu ([email protected])
* @version $Revision: 10369 $
*/
public class TLD extends RandomizableClassifier implements OptionHandler,
MultiInstanceCapabilitiesHandler, TechnicalInformationHandler {
/** for serialization */
static final long serialVersionUID = 6657315525171152210L;
/** The mean for each attribute of each positive exemplar */
protected double[][] m_MeanP = null;
/** The variance for each attribute of each positive exemplar */
protected double[][] m_VarianceP = null;
/** The mean for each attribute of each negative exemplar */
protected double[][] m_MeanN = null;
/** The variance for each attribute of each negative exemplar */
protected double[][] m_VarianceN = null;
/** The effective sum of weights of each positive exemplar in each dimension */
protected double[][] m_SumP = null;
/** The effective sum of weights of each negative exemplar in each dimension */
protected double[][] m_SumN = null;
/** The parameters to be estimated for each positive exemplar */
protected double[] m_ParamsP = null;
/** The parameters to be estimated for each negative exemplar */
protected double[] m_ParamsN = null;
/** The dimension of each exemplar, i.e. (numAttributes-2) */
protected int m_Dimension = 0;
/** The class label of each exemplar */
protected double[] m_Class = null;
/** The number of class labels in the data */
protected int m_NumClasses = 2;
/** The very small number representing zero */
  public static double ZERO = 1.0e-6;
/** The number of runs to perform */
protected int m_Run = 1;
  /** The cut-off value on the log-odds for classifying an exemplar */
  protected double m_Cutoff;

  /** Whether to use the empirical log-odds cut-off instead of 0 */
  protected boolean m_UseEmpiricalCutOff = false;
/**
 * Returns a string describing this classifier.
 *
 * @return a description of the classifier suitable for displaying in the
 *         explorer/experimenter gui
*/
public String globalInfo() {
return "Two-Level Distribution approach, changes the starting value of "
+ "the searching algorithm, supplement the cut-off modification and "
+ "check missing values.\n\n" + "For more information see:\n\n"
+ getTechnicalInformation().toString();
}
/**
* Returns an instance of a TechnicalInformation object, containing detailed
* information about the technical background of this class, e.g., paper
* reference or book this class is based on.
*
* @return the technical information about this class
*/
@Override
public TechnicalInformation getTechnicalInformation() {
TechnicalInformation result;
result = new TechnicalInformation(Type.MASTERSTHESIS);
result.setValue(Field.AUTHOR, "Xin Xu");
result.setValue(Field.YEAR, "2003");
result.setValue(Field.TITLE,
"Statistical learning in multiple instance problem");
result.setValue(Field.SCHOOL, "University of Waikato");
result.setValue(Field.ADDRESS, "Hamilton, NZ");
result.setValue(Field.NOTE, "0657.594");
return result;
}
/**
* Returns default capabilities of the classifier.
*
* @return the capabilities of this classifier
*/
@Override
public Capabilities getCapabilities() {
Capabilities result = super.getCapabilities();
result.disableAll();
// attributes
result.enable(Capability.NOMINAL_ATTRIBUTES);
result.enable(Capability.RELATIONAL_ATTRIBUTES);
// class
result.enable(Capability.BINARY_CLASS);
result.enable(Capability.MISSING_CLASS_VALUES);
// other
result.enable(Capability.ONLY_MULTIINSTANCE);
return result;
}
/**
* Returns the capabilities of this multi-instance classifier for the
* relational data.
*
* @return the capabilities of this object
* @see Capabilities
*/
@Override
public Capabilities getMultiInstanceCapabilities() {
Capabilities result = super.getCapabilities();
result.disableAll();
// attributes
result.enable(Capability.NUMERIC_ATTRIBUTES);
result.enable(Capability.MISSING_VALUES);
// class
result.disableAllClasses();
result.enable(Capability.NO_CLASS);
return result;
}
/**
 * Builds the classifier from the given training exemplars.
 *
* @param exs the training exemplars
* @throws Exception if the model cannot be built properly
*/
@Override
public void buildClassifier(Instances exs) throws Exception {
// can classifier handle the data?
getCapabilities().testWithFail(exs);
// remove instances with missing class
exs = new Instances(exs);
exs.deleteWithMissingClass();
int numegs = exs.numInstances();
m_Dimension = exs.attribute(1).relation().numAttributes();
Instances pos = new Instances(exs, 0), neg = new Instances(exs, 0);
for (int u = 0; u < numegs; u++) {
Instance example = exs.instance(u);
if (example.classValue() == 1) {
pos.add(example);
} else {
neg.add(example);
}
}
int pnum = pos.numInstances(), nnum = neg.numInstances();
m_MeanP = new double[pnum][m_Dimension];
m_VarianceP = new double[pnum][m_Dimension];
m_SumP = new double[pnum][m_Dimension];
m_MeanN = new double[nnum][m_Dimension];
m_VarianceN = new double[nnum][m_Dimension];
m_SumN = new double[nnum][m_Dimension];
m_ParamsP = new double[4 * m_Dimension];
m_ParamsN = new double[4 * m_Dimension];
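    // Model sketch (as implied by the comments below and detailed in Xu
    // (2003)): per dimension, the values inside a bag are treated as
    // normally distributed with bag-specific mean mu and variance sigma^2,
    // while across bags mu and sigma^2 follow a second-level distribution
    // governed by (a, b, w, m): E[sigma^2] = a/(b-2), var(mu) = w*sigma^2,
    // and m is the overall mean. One such quadruple is estimated per
    // dimension and per class, stored in m_ParamsP and m_ParamsN.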
    // Estimate the parameters used as starting values for the search
double[] pSumVal = new double[m_Dimension], // for m
nSumVal = new double[m_Dimension];
double[] maxVarsP = new double[m_Dimension], // for a
maxVarsN = new double[m_Dimension];
// Mean of sample variances: for b, b=a/E(\sigma^2)+2
double[] varMeanP = new double[m_Dimension], varMeanN = new double[m_Dimension];
// Variances of sample means: for w, w=E[var(\mu)]/E[\sigma^2]
double[] meanVarP = new double[m_Dimension], meanVarN = new double[m_Dimension];
// number of exemplars without all values missing
double[] numExsP = new double[m_Dimension], numExsN = new double[m_Dimension];
    // Extract metadata from both positive and negative bags
for (int v = 0; v < pnum; v++) {
/*
* Exemplar px = pos.exemplar(v); m_MeanP[v] = px.meanOrMode();
* m_VarianceP[v] = px.variance(); Instances pxi = px.getInstances();
*/
Instances pxi = pos.instance(v).relationalValue(1);
for (int k = 0; k < pxi.numAttributes(); k++) {
m_MeanP[v][k] = pxi.meanOrMode(k);
m_VarianceP[v][k] = pxi.variance(k);
}
for (int w = 0, t = 0; w < m_Dimension; w++, t++) {
// if((t==m_ClassIndex) || (t==m_IdIndex))
// t++;
if (!Double.isNaN(m_MeanP[v][w])) {
for (int u = 0; u < pxi.numInstances(); u++) {
Instance ins = pxi.instance(u);
if (!ins.isMissing(t)) {
m_SumP[v][w] += ins.weight();
}
}
numExsP[w]++;
pSumVal[w] += m_MeanP[v][w];
meanVarP[w] += m_MeanP[v][w] * m_MeanP[v][w];
if (maxVarsP[w] < m_VarianceP[v][w]) {
maxVarsP[w] = m_VarianceP[v][w];
}
varMeanP[w] += m_VarianceP[v][w];
m_VarianceP[v][w] *= (m_SumP[v][w] - 1.0);
if (m_VarianceP[v][w] < 0.0) {
m_VarianceP[v][w] = 0.0;
}
}
}
}
for (int v = 0; v < nnum; v++) {
/*
* Exemplar nx = neg.exemplar(v); m_MeanN[v] = nx.meanOrMode();
* m_VarianceN[v] = nx.variance(); Instances nxi = nx.getInstances();
*/
Instances nxi = neg.instance(v).relationalValue(1);
for (int k = 0; k < nxi.numAttributes(); k++) {
m_MeanN[v][k] = nxi.meanOrMode(k);
m_VarianceN[v][k] = nxi.variance(k);
}
for (int w = 0, t = 0; w < m_Dimension; w++, t++) {
// if((t==m_ClassIndex) || (t==m_IdIndex))
// t++;
if (!Double.isNaN(m_MeanN[v][w])) {
for (int u = 0; u < nxi.numInstances(); u++) {
if (!nxi.instance(u).isMissing(t)) {
m_SumN[v][w] += nxi.instance(u).weight();
}
}
numExsN[w]++;
nSumVal[w] += m_MeanN[v][w];
meanVarN[w] += m_MeanN[v][w] * m_MeanN[v][w];
if (maxVarsN[w] < m_VarianceN[v][w]) {
maxVarsN[w] = m_VarianceN[v][w];
}
varMeanN[w] += m_VarianceN[v][w];
m_VarianceN[v][w] *= (m_SumN[v][w] - 1.0);
if (m_VarianceN[v][w] < 0.0) {
m_VarianceN[v][w] = 0.0;
}
}
}
}
for (int w = 0; w < m_Dimension; w++) {
pSumVal[w] /= numExsP[w];
nSumVal[w] /= numExsN[w];
if (numExsP[w] > 1) {
meanVarP[w] = meanVarP[w] / (numExsP[w] - 1.0) - pSumVal[w]
* numExsP[w] / (numExsP[w] - 1.0);
}
if (numExsN[w] > 1) {
meanVarN[w] = meanVarN[w] / (numExsN[w] - 1.0) - nSumVal[w]
* numExsN[w] / (numExsN[w] - 1.0);
}
varMeanP[w] /= numExsP[w];
varMeanN[w] /= numExsN[w];
}
// Bounds and parameter values for each run
double[][] bounds = new double[2][4];
double[] pThisParam = new double[4], nThisParam = new double[4];
// Initial values for parameters
double a, b, w, m;
// Optimize for one dimension
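    // Each dimension is fitted independently: TLD_Optm (the Optimization
    // subclass used by this classifier) minimizes the negative
    // log-likelihood over (a, b, w, m) under the bound constraints below.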
for (int x = 0; x < m_Dimension; x++) {
if (getDebug()) {
System.err.println("\n\n!!!!!!!!!!!!!!!!!!!!!!???Dimension #" + x);
}
      // Positive exemplars: first run
a = (maxVarsP[x] > ZERO) ? maxVarsP[x] : 1.0;
if (varMeanP[x] <= ZERO) {
varMeanP[x] = ZERO; // modified by LinDong (09/2005)
}
b = a / varMeanP[x] + 2.0; // a/(b-2) = E(\sigma^2)
w = meanVarP[x] / varMeanP[x]; // E[var(\mu)] = w*E[\sigma^2]
if (w <= ZERO) {
w = 1.0;
}
m = pSumVal[x];
pThisParam[0] = a; // a
pThisParam[1] = b; // b
pThisParam[2] = w; // w
pThisParam[3] = m; // m
      // Negative exemplars: first run
a = (maxVarsN[x] > ZERO) ? maxVarsN[x] : 1.0;
if (varMeanN[x] <= ZERO) {
varMeanN[x] = ZERO; // modified by LinDong (09/2005)
}
b = a / varMeanN[x] + 2.0; // a/(b-2) = E(\sigma^2)
w = meanVarN[x] / varMeanN[x]; // E[var(\mu)] = w*E[\sigma^2]
if (w <= ZERO) {
w = 1.0;
}
m = nSumVal[x];
nThisParam[0] = a; // a
nThisParam[1] = b; // b
nThisParam[2] = w; // w
nThisParam[3] = m; // m
// Bound constraints
bounds[0][0] = ZERO; // a > 0
bounds[0][1] = 2.0 + ZERO; // b > 2
bounds[0][2] = ZERO; // w > 0
bounds[0][3] = Double.NaN;
for (int t = 0; t < 4; t++) {
bounds[1][t] = Double.NaN;
m_ParamsP[4 * x + t] = pThisParam[t];
m_ParamsN[4 * x + t] = nThisParam[t];
}
double pminVal = Double.MAX_VALUE, nminVal = Double.MAX_VALUE;
Random whichEx = new Random(m_Seed);
TLD_Optm pOp = null, nOp = null;
boolean isRunValid = true;
double[] sumP = new double[pnum], meanP = new double[pnum], varP = new double[pnum];
double[] sumN = new double[nnum], meanN = new double[nnum], varN = new double[nnum];
// One dimension
for (int p = 0; p < pnum; p++) {
sumP[p] = m_SumP[p][x];
meanP[p] = m_MeanP[p][x];
varP[p] = m_VarianceP[p][x];
}
for (int q = 0; q < nnum; q++) {
sumN[q] = m_SumN[q][x];
meanN[q] = m_MeanN[q][x];
varN[q] = m_VarianceN[q][x];
}
for (int y = 0; y < m_Run;) {
if (getDebug()) {
System.err.println("\n\n!!!!!!!!!!!!!!!!!!!!!!???Run #" + y);
}
double thisMin;
if (getDebug()) {
System.err.println("\nPositive exemplars");
}
pOp = new TLD_Optm();
pOp.setNum(sumP);
pOp.setSSquare(varP);
pOp.setXBar(meanP);
pThisParam = pOp.findArgmin(pThisParam, bounds);
while (pThisParam == null) {
pThisParam = pOp.getVarbValues();
if (getDebug()) {
System.err.println("!!! 200 iterations finished, not enough!");
}
pThisParam = pOp.findArgmin(pThisParam, bounds);
}
thisMin = pOp.getMinFunction();
if (!Double.isNaN(thisMin) && (thisMin < pminVal)) {
pminVal = thisMin;
for (int z = 0; z < 4; z++) {
m_ParamsP[4 * x + z] = pThisParam[z];
}
}
if (Double.isNaN(thisMin)) {
pThisParam = new double[4];
isRunValid = false;
}
if (getDebug()) {
System.err.println("\nNegative exemplars");
}
nOp = new TLD_Optm();
nOp.setNum(sumN);
nOp.setSSquare(varN);
nOp.setXBar(meanN);
nThisParam = nOp.findArgmin(nThisParam, bounds);
while (nThisParam == null) {
nThisParam = nOp.getVarbValues();
if (getDebug()) {
System.err.println("!!! 200 iterations finished, not enough!");
}
nThisParam = nOp.findArgmin(nThisParam, bounds);
}
thisMin = nOp.getMinFunction();
if (!Double.isNaN(thisMin) && (thisMin < nminVal)) {
nminVal = thisMin;
for (int z = 0; z < 4; z++) {
m_ParamsN[4 * x + z] = nThisParam[z];
}
}
if (Double.isNaN(thisMin)) {
nThisParam = new double[4];
isRunValid = false;
}
if (!isRunValid) {
y--;
isRunValid = true;
}
if (++y < m_Run) {
// Change the initial parameters and restart
        int pone = whichEx.nextInt(pnum), // Randomly pick one pos. exemplar
none = whichEx.nextInt(nnum);
// Positive exemplars: next run
while ((m_SumP[pone][x] <= 1.0) || Double.isNaN(m_MeanP[pone][x])) {
pone = whichEx.nextInt(pnum);
}
a = m_VarianceP[pone][x] / (m_SumP[pone][x] - 1.0);
if (a <= ZERO) {
a = m_ParamsN[4 * x]; // Change to negative params
}
m = m_MeanP[pone][x];
double sq = (m - m_ParamsP[4 * x + 3]) * (m - m_ParamsP[4 * x + 3]);
b = a * m_ParamsP[4 * x + 2] / sq + 2.0; // b=a/Var+2, assuming
// Var=Sq/w'
if ((b <= ZERO) || Double.isNaN(b) || Double.isInfinite(b)) {
b = m_ParamsN[4 * x + 1];
}
w = sq * (m_ParamsP[4 * x + 1] - 2.0) / m_ParamsP[4 * x];// w=Sq/Var,
// assuming
// Var=a'/(b'-2)
if ((w <= ZERO) || Double.isNaN(w) || Double.isInfinite(w)) {
w = m_ParamsN[4 * x + 2];
}
pThisParam[0] = a; // a
pThisParam[1] = b; // b
pThisParam[2] = w; // w
pThisParam[3] = m; // m
// Negative exemplars: next run
while ((m_SumN[none][x] <= 1.0) || Double.isNaN(m_MeanN[none][x])) {
none = whichEx.nextInt(nnum);
}
a = m_VarianceN[none][x] / (m_SumN[none][x] - 1.0);
if (a <= ZERO) {
a = m_ParamsP[4 * x];
}
m = m_MeanN[none][x];
sq = (m - m_ParamsN[4 * x + 3]) * (m - m_ParamsN[4 * x + 3]);
b = a * m_ParamsN[4 * x + 2] / sq + 2.0; // b=a/Var+2, assuming
// Var=Sq/w'
if ((b <= ZERO) || Double.isNaN(b) || Double.isInfinite(b)) {
b = m_ParamsP[4 * x + 1];
}
w = sq * (m_ParamsN[4 * x + 1] - 2.0) / m_ParamsN[4 * x];// w=Sq/Var,
// assuming
// Var=a'/(b'-2)
if ((w <= ZERO) || Double.isNaN(w) || Double.isInfinite(w)) {
w = m_ParamsP[4 * x + 2];
}
nThisParam[0] = a; // a
nThisParam[1] = b; // b
nThisParam[2] = w; // w
nThisParam[3] = m; // m
}
}
}
for (int x = 0, y = 0; x < m_Dimension; x++, y++) {
// if((x==exs.classIndex()) || (x==exs.idIndex()))
// y++;
a = m_ParamsP[4 * x];
b = m_ParamsP[4 * x + 1];
w = m_ParamsP[4 * x + 2];
m = m_ParamsP[4 * x + 3];
if (getDebug()) {
System.err.println("\n\n???Positive: ( "
+ exs.attribute(1).relation().attribute(y) + "): a=" + a + ", b=" + b
+ ", w=" + w + ", m=" + m);
}
a = m_ParamsN[4 * x];
b = m_ParamsN[4 * x + 1];
w = m_ParamsN[4 * x + 2];
m = m_ParamsN[4 * x + 3];
if (getDebug()) {
System.err.println("???Negative: ("
+ exs.attribute(1).relation().attribute(y) + "): a=" + a + ", b=" + b
+ ", w=" + w + ", m=" + m);
}
}
if (m_UseEmpiricalCutOff) {
// Find the empirical cut-off
double[] pLogOdds = new double[pnum], nLogOdds = new double[nnum];
for (int p = 0; p < pnum; p++) {
pLogOdds[p] = likelihoodRatio(m_SumP[p], m_MeanP[p], m_VarianceP[p]);
}
for (int q = 0; q < nnum; q++) {
nLogOdds[q] = likelihoodRatio(m_SumN[q], m_MeanN[q], m_VarianceN[q]);
}
// Update m_Cutoff
findCutOff(pLogOdds, nLogOdds);
} else {
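      // Default cut-off: the log prior odds, log(nnum/pnum), estimated from
      // the bag counts; a bag is then labelled positive only if its
      // log-likelihood ratio exceeds this prior imbalance.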
m_Cutoff = -Math.log((double) pnum / (double) nnum);
}
if (getDebug()) {
System.err.println("???Cut-off=" + m_Cutoff);
}
}
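  /*
   * Usage sketch of the public API (the file name "mi-data.arff" is
   * hypothetical; the data must be in the standard WEKA multi-instance
   * format of bag-id, relational bag attribute, class):
   *
   *   Instances data = new Instances(
   *     new java.io.BufferedReader(new java.io.FileReader("mi-data.arff")));
   *   data.setClassIndex(data.numAttributes() - 1);
   *   TLD tld = new TLD();
   *   tld.setOptions(new String[] { "-C", "-R", "10" });
   *   tld.buildClassifier(data);
   *   double label = tld.classifyInstance(data.instance(0));
   */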
/**
 * Classifies the given test exemplar by comparing its log-odds against the
 * cut-off.
 *
* @param ex the given test exemplar
* @return the classification
* @throws Exception if the exemplar could not be classified successfully
*/
@Override
public double classifyInstance(Instance ex) throws Exception {
// Exemplar ex = new Exemplar(e);
Instances exi = ex.relationalValue(1);
double[] n = new double[m_Dimension];
double[] xBar = new double[m_Dimension];
double[] sSq = new double[m_Dimension];
for (int i = 0; i < exi.numAttributes(); i++) {
xBar[i] = exi.meanOrMode(i);
sSq[i] = exi.variance(i);
}
for (int w = 0, t = 0; w < m_Dimension; w++, t++) {
// if((t==m_ClassIndex) || (t==m_IdIndex))
// t++;
for (int u = 0; u < exi.numInstances(); u++) {
if (!exi.instance(u).isMissing(t)) {
n[w] += exi.instance(u).weight();
}
}
sSq[w] = sSq[w] * (n[w] - 1.0);
if (sSq[w] <= 0.0) {
sSq[w] = 0.0;
}
}
double logOdds = likelihoodRatio(n, xBar, sSq);
return (logOdds > m_Cutoff) ? 1 : 0;
}
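  /**
   * Computes the log-odds (log-likelihood ratio LLP - LLN) of a bag
   * summarized by its per-dimension effective counts, sample means and
   * scaled variances, under the positive and negative two-level models.
   * Dimensions whose values are all missing are skipped.
   *
   * @param n the effective sum of weights in each dimension
   * @param xBar the sample mean in each dimension
   * @param sSq the scaled sample variance in each dimension
   * @return the log-likelihood ratio
   */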
private double likelihoodRatio(double[] n, double[] xBar, double[] sSq) {
double LLP = 0.0, LLN = 0.0;
for (int x = 0; x < m_Dimension; x++) {
if (Double.isNaN(xBar[x])) {
continue; // All missing values
}
int halfN = ((int) n[x]) / 2;
// Log-likelihood for positive
double a = m_ParamsP[4 * x], b = m_ParamsP[4 * x + 1], w = m_ParamsP[4 * x + 2], m = m_ParamsP[4 * x + 3];
LLP += 0.5
* b
* Math.log(a)
+ 0.5
* (b + n[x] - 1.0)
* Math.log(1.0 + n[x] * w)
- 0.5
* (b + n[x])
* Math.log((1.0 + n[x] * w) * (a + sSq[x]) + n[x] * (xBar[x] - m)
* (xBar[x] - m)) - 0.5 * n[x] * Math.log(Math.PI);
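      // The loop below accumulates ln Gamma(b/2 + n/2) - ln Gamma(b/2) term
      // by term via Gamma(z+1) = z * Gamma(z); for odd n[x] the remaining
      // half-integer step comes from TLD_Optm.diffLnGamma(b/2).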
for (int y = 1; y <= halfN; y++) {
LLP += Math.log(b / 2.0 + n[x] / 2.0 - y);
}
if (n[x] / 2.0 > halfN) {
LLP += TLD_Optm.diffLnGamma(b / 2.0);
}
// Log-likelihood for negative
a = m_ParamsN[4 * x];
b = m_ParamsN[4 * x + 1];
w = m_ParamsN[4 * x + 2];
m = m_ParamsN[4 * x + 3];
LLN += 0.5
* b
* Math.log(a)
+ 0.5
* (b + n[x] - 1.0)
* Math.log(1.0 + n[x] * w)
- 0.5
* (b + n[x])
* Math.log((1.0 + n[x] * w) * (a + sSq[x]) + n[x] * (xBar[x] - m)
* (xBar[x] - m)) - 0.5 * n[x] * Math.log(Math.PI);
for (int y = 1; y <= halfN; y++) {
LLN += Math.log(b / 2.0 + n[x] / 2.0 - y);
}
if (n[x] / 2.0 > halfN) {
LLN += TLD_Optm.diffLnGamma(b / 2.0);
}
}
return LLP - LLN;
}
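  /**
   * Finds the empirical cut-off on the training log-odds: scans the sorted
   * positive and negative log-odds and stores in m_Cutoff the split value
   * that maximizes training accuracy, breaking ties in favour of the split
   * closest to 0.
   *
   * @param pos the log-odds of the positive exemplars
   * @param neg the log-odds of the negative exemplars
   */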
private void findCutOff(double[] pos, double[] neg) {
int[] pOrder = Utils.sort(pos), nOrder = Utils.sort(neg);
    /*
     * System.err.println("\n\n???Positive: "); for(int t=0; t<pOrder.length;
     * t++) System.err.print(t+":"+pos[pOrder[t]]+" ");
     * System.err.println("\n\n???Negative: "); for(int t=0; t<nOrder.length;
     * t++) System.err.print(t+":"+neg[nOrder[t]]+" ");
     */
    int pNum = pos.length, nNum = neg.length, p = 0, n = 0;
    double fstAccu = 0.0, sndAccu = pNum, split;
    double maxAccu = 0, minDistTo0 = Double.MAX_VALUE;

    // Skip negatives whose log-odds are no smaller than the lowest positive
    for (; (n < nNum) && (pos[pOrder[0]] >= neg[nOrder[n]]); n++, fstAccu++) {
      ;
    }
    if (n >= nNum) { // totally separate
m_Cutoff = (neg[nOrder[nNum - 1]] + pos[pOrder[0]]) / 2.0;
// m_Cutoff = neg[nOrder[nNum-1]];
return;
}
// count=n; NOT USED
while ((p < pNum) && (n < nNum)) {
// Compare the next in the two lists
if (pos[pOrder[p]] >= neg[nOrder[n]]) { // Neg has less log-odds
fstAccu += 1.0;
split = neg[nOrder[n]];
n++;
} else {
sndAccu -= 1.0;
split = pos[pOrder[p]];
p++;
}
// count++; NOT USED
if ((fstAccu + sndAccu > maxAccu)
|| ((fstAccu + sndAccu == maxAccu) && (Math.abs(split) < minDistTo0))) {
maxAccu = fstAccu + sndAccu;
m_Cutoff = split;
minDistTo0 = Math.abs(split);
}
}
}
/**
* Returns an enumeration describing the available options
*
* @return an enumeration of all the available options
*/
@Override
  public Enumeration<Option> listOptions() {