/*
 * A collection of multi-instance learning classifiers. Includes the Citation
 * KNN method, several variants of the diverse density method, support vector
 * machines for multi-instance learning, simple wrappers for applying standard
 * propositional learners to multi-instance data, decision tree and rule
 * learners, and some other methods.
 */
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* MIBoost.java
* Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.mi;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Vector;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.classifiers.SingleClassifierEnhancer;
import weka.core.Capabilities;
import weka.core.Capabilities.Capability;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.MultiInstanceCapabilitiesHandler;
import weka.core.Optimization;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Discretize;
import weka.filters.unsupervised.attribute.MultiInstanceToPropositional;
/**
* MI AdaBoost method, considers the geometric mean of
* posterior of instances inside a bag (arithmetic mean of log-posterior) and
* the expectation for a bag is taken inside the loss function.
*
* For more information about Adaboost, see:
*
* Yoav Freund, Robert E. Schapire: Experiments with a new boosting algorithm.
* In: Thirteenth International Conference on Machine Learning, San Francisco,
* 148-156, 1996.
*
*
*
* BibTeX:
*
*
* @inproceedings{Freund1996,
* address = {San Francisco},
* author = {Yoav Freund and Robert E. Schapire},
* booktitle = {Thirteenth International Conference on Machine Learning},
* pages = {148-156},
* publisher = {Morgan Kaufmann},
* title = {Experiments with a new boosting algorithm},
* year = {1996}
* }
*
*
*
*
* Valid options are:
*
*
*
* -B <num>
* The number of bins in discretization
* (default 0, no discretization)
*
*
*
* -R <num>
* Maximum number of boost iterations.
* (default 10)
*
*
*
* -W <class name>
* Full name of classifier to boost.
* eg: weka.classifiers.bayes.NaiveBayes
*
*
*
*
* @author Eibe Frank ([email protected])
* @author Xin Xu ([email protected])
* @version $Revision: 10369 $
*/
public class MIBoost extends SingleClassifierEnhancer implements OptionHandler,
MultiInstanceCapabilitiesHandler, TechnicalInformationHandler {
/** for serialization */
static final long serialVersionUID = -3808427225599279539L;
/** the base classifier models built in each boosting iteration */
protected Classifier[] m_Models;
/** The number of the class labels */
protected int m_NumClasses;
/** Class labels for each bag */
protected int[] m_Classes;
/** header (attribute information) of the propositionalized dataset used to build the model */
protected Instances m_Attributes;
/**
 * Number of iterations.
 * NOTE(review): distinct from m_MaxIterations below — confirm which of the
 * two actually bounds the boosting loop; this one defaults to 100.
 */
private int m_NumIterations = 100;
/** Voting weights of models (one per boosting iteration) */
protected double[] m_Beta;
/** the maximum number of boost iterations (option -R, default 10) */
protected int m_MaxIterations = 10;
/** the number of discretization bins (option -B; 0 means no discretization) */
protected int m_DiscretizeBin = 0;
/** filter used for discretization (only created when m_DiscretizeBin > 0) */
protected Discretize m_Filter = null;
/** filter used to convert the MI dataset into a single-instance dataset */
protected MultiInstanceToPropositional m_ConvertToSI = new MultiInstanceToPropositional();
/**
 * Returns a string describing this classifier.
 *
 * @return a description of the classifier suitable for displaying in the
 *         explorer/experimenter gui
 */
public String globalInfo() {
  // Fixed typo in the user-facing description: "arithmatic" -> "arithmetic".
  return "MI AdaBoost method, considers the geometric mean of posterior "
    + "of instances inside a bag (arithmetic mean of log-posterior) and "
    + "the expectation for a bag is taken inside the loss function.\n\n"
    + "For more information about Adaboost, see:\n\n"
    + getTechnicalInformation().toString();
}
/**
 * Returns an instance of a TechnicalInformation object, containing detailed
 * information about the technical background of this class, e.g., paper
 * reference or book this class is based on.
 *
 * @return the technical information about this class
 */
@Override
public TechnicalInformation getTechnicalInformation() {
  final TechnicalInformation info = new TechnicalInformation(Type.INPROCEEDINGS);
  // Freund & Schapire's original AdaBoost paper (ICML 1996).
  info.setValue(Field.AUTHOR, "Yoav Freund and Robert E. Schapire");
  info.setValue(Field.TITLE, "Experiments with a new boosting algorithm");
  info.setValue(Field.BOOKTITLE, "Thirteenth International Conference on Machine Learning");
  info.setValue(Field.YEAR, "1996");
  info.setValue(Field.PAGES, "148-156");
  info.setValue(Field.PUBLISHER, "Morgan Kaufmann");
  info.setValue(Field.ADDRESS, "San Francisco");
  return info;
}
/**
* Returns an enumeration describing the available options
*
* @return an enumeration of all the available options
*/
@Override
public Enumeration