The Waikato Environment for Knowledge Analysis (WEKA) is a machine
learning workbench. This is the developer version, the
"bleeding edge" of development, you could say. New functionality gets added
to this version.
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Bagging.java
* Copyright (C) 1999-2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.meta;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Random;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.RandomizableParallelIteratedSingleClassifierEnhancer;
import weka.core.AdditionalMeasureProducer;
import weka.core.Aggregateable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.PartitionGenerator;
import weka.core.Randomizable;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
/**
* Class for bagging a classifier to reduce variance. Can do classification and regression depending on the base learner.
*
* For more information, see
*
* Leo Breiman (1996). Bagging predictors. Machine Learning. 24(2):123-140.
*
*
* BibTeX:
*
* &#64;article{Breiman1996,
*    author = {Leo Breiman},
*    journal = {Machine Learning},
*    number = {2},
*    pages = {123-140},
*    title = {Bagging predictors},
*    volume = {24},
*    year = {1996}
* }
*
* Valid options are:
*
* -P
*  Size of each bag, as a percentage of the
*  training set size. (default 100)
*
* -O
*  Calculate the out of bag error.
*
* -represent-copies-using-weights
*  Represent copies of instances using weights rather than explicitly.
*
* -S <num>
*  Random number seed.
*  (default 1)
*
* -num-slots <num>
*  Number of execution slots.
*  (default 1 - i.e. no parallelism)
*
* -I <num>
*  Number of iterations.
*  (default 10)
*
* -D
*  If set, classifier is run in debug mode and
*  may output additional info to the console
*
* -W
*  Full name of base classifier.
*  (default: weka.classifiers.trees.REPTree)
*
* Options specific to classifier weka.classifiers.trees.REPTree:
*
* -M <minimum number of instances>
*  Set minimum number of instances per leaf (default 2).
*
* -V <minimum variance for split>
*  Set minimum numeric class variance proportion
*  of train variance for split (default 1e-3).
*
* -N <number of folds>
*  Number of folds for reduced error pruning (default 3).
*
* -S <seed>
*  Seed for random data shuffling (default 1).
*
* -P
*  No pruning.
*
* -L
*  Maximum tree depth (default -1, no maximum)
*
* -I
*  Initial class value count (default 0)
*
* -R
*  Spread initial count over all class values (i.e. don't use 1 per value)
*
* Options after -- are passed to the designated classifier.
*
* @author Eibe Frank ([email protected])
* @author Len Trigg ([email protected])
* @author Richard Kirkby ([email protected])
* @version $Revision: 10470 $
*/
public class Bagging
extends RandomizableParallelIteratedSingleClassifierEnhancer
implements WeightedInstancesHandler, AdditionalMeasureProducer,
TechnicalInformationHandler, PartitionGenerator, Aggregateable<Bagging> {
/** for serialization */
static final long serialVersionUID = -115879962237199703L;
/** The size of each bag sample, as a percentage of the training size */
protected int m_BagSizePercent = 100;
/** Whether to calculate the out of bag error */
protected boolean m_CalcOutOfBag = false;
/** Whether to represent copies of instances using weights rather than explicitly */
protected boolean m_RepresentUsingWeights = false;
/** The out of bag error that has been calculated */
protected double m_OutOfBagError;
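// Usage sketch: the fields above back the -P, -O and
// -represent-copies-using-weights options documented in the class Javadoc.
// One way to drive them through WEKA's standard option parsing (a minimal
// sketch, assuming the OptionHandler machinery inherited from the classifier
// hierarchy):
//
//   Bagging bagger = new Bagging();
//   bagger.setOptions(weka.core.Utils.splitOptions(
//       "-P 80 -O -represent-copies-using-weights"));
//   // bag size is now 80% of the training set size; the out-of-bag error
//   // is calculated when buildClassifier() runs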
/**
* Constructor.
*/
public Bagging() {
m_Classifier = new weka.classifiers.trees.REPTree();
}
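// A minimal end-to-end sketch of using this classifier programmatically,
// assuming an ARFF file of your own (the path below is hypothetical) and the
// standard setters defined further down in this class:
//
//   import weka.core.Instances;
//   import weka.core.converters.ConverterUtils.DataSource;
//
//   Instances data = DataSource.read("/path/to/data.arff"); // hypothetical path
//   data.setClassIndex(data.numAttributes() - 1);
//   Bagging bagger = new Bagging();      // REPTree base learner by default
//   bagger.setNumIterations(10);         // -I 10
//   bagger.setBagSizePercent(100);       // -P 100
//   bagger.setCalcOutOfBag(true);        // -O
//   bagger.buildClassifier(data);
//   double oobError = bagger.measureOutOfBagError();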
/**
* Returns a string describing this classifier.
* @return a description suitable for
* displaying in the explorer/experimenter gui
*/
public String globalInfo() {
return "Class for bagging a classifier to reduce variance. Can do classification "
+ "and regression depending on the base learner. \n\n"
+ "For more information, see\n\n"
+ getTechnicalInformation().toString();
}
/**
* Returns an instance of a TechnicalInformation object, containing
* detailed information about the technical background of this class,
* e.g., paper reference or book this class is based on.
*
* @return the technical information about this class
*/
@Override
public TechnicalInformation getTechnicalInformation() {
TechnicalInformation result;
result = new TechnicalInformation(Type.ARTICLE);
result.setValue(Field.AUTHOR, "Leo Breiman");
result.setValue(Field.YEAR, "1996");
result.setValue(Field.TITLE, "Bagging predictors");
result.setValue(Field.JOURNAL, "Machine Learning");
result.setValue(Field.VOLUME, "24");
result.setValue(Field.NUMBER, "2");
result.setValue(Field.PAGES, "123-140");
return result;
}
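// Note: globalInfo() above appends getTechnicalInformation().toString(), which
// renders the entry built here roughly as the citation given in the class
// Javadoc: Leo Breiman (1996). Bagging predictors. Machine Learning. 24(2):123-140.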
/**
* String describing default classifier.
*
* @return the default classifier classname
*/
@Override
protected String defaultClassifierString() {
return "weka.classifiers.trees.REPTree";
}
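// The default REPTree base learner can be swapped out programmatically, e.g.
// (a sketch, assuming weka.classifiers.trees.J48 is available in the same
// distribution; roughly equivalent to -W weka.classifiers.trees.J48):
//
//   Bagging bagger = new Bagging();
//   bagger.setClassifier(new weka.classifiers.trees.J48());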
/**
* Returns an enumeration describing the available options.
*
* @return an enumeration of all the available options.
*/
@Override
public Enumeration