/*
 * The Waikato Environment for Knowledge Analysis (WEKA), a machine
 * learning workbench. This version represents the developer version, the
 * "bleeding edge" of development, you could say. New functionality gets added
 * to this version.
 */
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* AdditiveRegression.java
* Copyright (C) 2000-2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.meta;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Vector;
import java.util.ArrayList;
import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.IteratedSingleClassifierEnhancer;
import weka.classifiers.IterativeClassifier;
import weka.classifiers.rules.ZeroR;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Capabilities.Capability;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.UnassignedClassException;
/**
* Meta classifier that enhances the performance of a regression base classifier. Each iteration fits a model to the residuals left by the classifier on the previous iteration. Prediction is accomplished by adding the predictions of each classifier. Reducing the shrinkage (learning rate) parameter helps prevent overfitting and has a smoothing effect but increases the learning time.
*
* For more information see:
*
* J.H. Friedman (1999). Stochastic Gradient Boosting.
*
*
* BibTeX:
* <pre>
* &#64;techreport{Friedman1999,
*    author = {J.H. Friedman},
*    institution = {Stanford University},
*    title = {Stochastic Gradient Boosting},
*    year = {1999},
*    PS = {http://www-stat.stanford.edu/~jhf/ftp/stobst.ps}
* }
* </pre>
*
* Valid options are:
*
-A
* Minimize absolute error instead of squared error (assumes that base learner minimizes absolute error).
*
*
-D
* If set, classifier is run in debug mode and
* may output additional info to the console
*
*
-W
* Full name of base classifier.
* (default: weka.classifiers.trees.DecisionStump)
*
*
* Options specific to classifier weka.classifiers.trees.DecisionStump:
*
*
*
-D
* If set, classifier is run in debug mode and
* may output additional info to the console
*
*
* @author Mark Hall ([email protected])
* @version $Revision: 15519 $
*/
public class AdditiveRegression extends IteratedSingleClassifierEnhancer implements OptionHandler,
AdditionalMeasureProducer, WeightedInstancesHandler, TechnicalInformationHandler, IterativeClassifier {
/** For serialization. Do not change across otherwise-compatible versions. */
static final long serialVersionUID = -2368937577670527151L;
/** Stores the base models built so far, one per boosting iteration.
Note: we are hiding the m_Classifiers variable from IteratedSingleClassifierEnhancer.
NOTE(review): declared as a raw ArrayList — presumably holds Classifier
instances; confirm element type before parameterizing. */
protected ArrayList m_Classifiers;
/** Shrinkage (learning rate) multiplier applied to each iteration's
contribution. Default 1.0 = no shrinkage. */
protected double m_shrinkage = 1.0;
/** The initial constant prediction (the mean, or the median when
minimizing absolute error — TODO confirm against buildClassifier). */
protected double m_InitialPrediction;
/** Whether we have suitable data or not; when false only the
constant initial prediction (mean/mode) is used. */
protected boolean m_SuitableData = true;
/** The working copy of the training data used while iterating. */
protected Instances m_Data;
/** The current sum of (absolute or squared) residuals. */
protected double m_Error;
/** The improvement in the sum of (absolute or squared) residuals
achieved by the most recent iteration. */
protected double m_Diff;
/** Whether to minimise absolute error instead of squared error. */
protected boolean m_MinimizeAbsoluteError;
/**
 * Whether to allow training to continue at a later point after the initial
 * model is built (see the IterativeClassifier interface).
 */
protected boolean m_resume;
/** Number of iterations performed in the current session of iterating. */
protected int m_numItsPerformed;
/**
 * Returns a description of this scheme suitable for display in the
 * Explorer/Experimenter GUI, followed by the literature reference.
 *
 * @return a human-readable summary of this classifier
 */
public String globalInfo() {
  StringBuilder info = new StringBuilder();
  info.append(" Meta classifier that enhances the performance of a regression ");
  info.append("base classifier. Each iteration fits a model to the residuals left ");
  info.append("by the classifier on the previous iteration. Prediction is ");
  info.append("accomplished by adding the predictions of each classifier. ");
  info.append("Reducing the shrinkage (learning rate) parameter helps prevent ");
  info.append("overfitting and has a smoothing effect but increases the learning ");
  info.append("time.\n\n");
  info.append("For more information see:\n\n");
  info.append(getTechnicalInformation().toString());
  return info.toString();
}
/**
 * Returns a TechnicalInformation object with the bibliographic details
 * of the publication this scheme is based on (Friedman's stochastic
 * gradient boosting technical report).
 *
 * @return the technical information about this class
 */
public TechnicalInformation getTechnicalInformation() {
  TechnicalInformation paper = new TechnicalInformation(Type.TECHREPORT);
  paper.setValue(Field.AUTHOR, "J.H. Friedman");
  paper.setValue(Field.YEAR, "1999");
  paper.setValue(Field.TITLE, "Stochastic Gradient Boosting");
  paper.setValue(Field.INSTITUTION, "Stanford University");
  paper.setValue(Field.PS, "http://www-stat.stanford.edu/~jhf/ftp/stobst.ps");
  return paper;
}
/**
 * Creates an AdditiveRegression using the default base learner,
 * a weka.classifiers.trees.DecisionStump.
 */
public AdditiveRegression() {
  super();
  m_Classifier = new weka.classifiers.trees.DecisionStump();
}
/**
 * Creates an AdditiveRegression that boosts the supplied base learner.
 *
 * @param classifier the base classifier to fit to the residuals
 *                   on each iteration
 */
public AdditiveRegression(Classifier classifier) {
  super();
  m_Classifier = classifier;
}
/**
 * Returns the fully-qualified class name of the default base classifier.
 *
 * @return "weka.classifiers.trees.DecisionStump"
 */
protected String defaultClassifierString() {
  // Derive the name from the class literal so it cannot drift out of
  // sync with the type used by the default constructor.
  return weka.classifiers.trees.DecisionStump.class.getName();
}
/**
* Returns an enumeration describing the available options.
*
* @return an enumeration of all the available options.
*/
public Enumeration