All downloads are free. The search and download functionality uses the official Maven repository.

meka.classifiers.multilabel.neurofuzzy.ARAMNetworkSparseV Maven / Gradle / Ivy

Go to download

The MEKA project provides an open source implementation of methods for multi-label classification and evaluation. It is based on the WEKA Machine Learning Toolkit. Several benchmark methods are also included, as well as the pruned sets and classifier chains methods, other methods from the scientific literature, and a wrapper to the MULAN framework.

The newest version!
/*
 *
 *  
 *  Adapted from NaiveBayes.java
 *  
 *  Copyright (C) 2016 Fernando Benites
 *  @author Fernando Benites
 */
package meka.classifiers.multilabel.neurofuzzy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.Vector;


import meka.classifiers.multilabel.Evaluation;
import meka.classifiers.multilabel.MultiLabelClassifier;
import meka.classifiers.multilabel.ProblemTransformationMethod;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.Utils;

/**
 * ****REPLACE THE FOLLOWING WITH SIMILAR INFORMATION.
 * Class for a Naive Bayes classifier using estimator classes. Numeric 
 * estimator precision values are chosen based on analysis of the 
 * training data. For this reason, the classifier is not an 
 * UpdateableClassifier (which in typical usage are initialized with zero 
 * training instances) -- if you need the UpdateableClassifier functionality,
 * use the NaiveBayesUpdateable classifier. The NaiveBayesUpdateable
 * classifier will  use a default precision of 0.1 for numeric attributes
 * when buildClassifier is called with zero training instances.
 * 

 * <p>For more information on Naive Bayes classifiers, see:</p>
 *
 * <p>George H. John and Pat Langley (1995). Estimating Continuous
 * Distributions in Bayesian Classifiers. Proceedings of the Eleventh
 * Conference on Uncertainty in Artificial Intelligence. pp. 338-345.
 * Morgan Kaufmann, San Mateo.</p>
 *
 * <p>Valid options are:</p>
 *
 * <p>-K<br>
 * Use kernel estimation for modelling numeric attributes rather than
 * a single normal distribution.</p>
 *
 * <p>-D<br>
 * Use supervised discretization to process numeric attributes.</p>
 *
* NOTE(review): everything from here to the end of the file appears to have been
* damaged by an HTML/extraction step. Text following '&lt;' characters was
* swallowed (generic type parameters such as "new ArrayList&lt;...&gt;()", loop
* conditions like "i &lt; n", and the type parameter of "public Enumeration"
* in listOptions are all truncated), and whole method bodies are fused onto
* single physical lines. The code is preserved below byte-for-byte; it cannot
* compile until the missing text is restored from the original MEKA source of
* meka.classifiers.multilabel.neurofuzzy.ARAMNetworkSparseV. Do not "fix" the
* truncated statements by guessing — recover them from upstream.
* * @author Len Trigg ([email protected]) * @author Eibe Frank ([email protected]) * @author Rushi Bhatt ([email protected]) * @version $Revision: 1.16 $ * Modified by Rushi for use as a CN710 template */ public class ARAMNetworkSparseV extends ARAMNetworkClass { //**** THIS IS WHERE CLASSIFIER WEIGHTS ETC GO **** //define stuff like weight matrices, classifier parameters etc. //e.g., protected double rho_a_bar=0.0; SparseArray[] weightsA = null; SparseArray[] upweightsA=null; double[] sweightsA = null; double sweightsA0; SparseArray[] weightsB = null; HashMap hmclasses = null; int snumFeatures=0; int snumClasses=0; int numinstances=0; int activated=0; public ARAMNetworkSparseV(int fnumFeatures, int fnumClasses, double fro, double fthreshold) { initARAM(fnumFeatures, fnumClasses, fro, fthreshold); } public ARAMNetworkSparseV(){ } private void initARAM(int fnumFeatures, int fnumClasses, double fro, double fthreshold){ numFeatures = fnumFeatures; snumFeatures = (int)(0.5*numFeatures); numClasses = fnumClasses; snumClasses= (int)(0.5*numClasses); threshold = fthreshold; weightsA = new SparseArray[1]; weightsA[0] = new SparseArray(); upweightsA = new SparseArray[1]; upweightsA[0] = new SparseArray(); sweightsA = new double[1]; sweightsA[0]=0; for(int i=0;i(); } /** * Returns a string describing this classifier * @return a description of the classifier suitable for * displaying in the explorer/experimenter gui. * ****MODIFY WITH CORRECT INFORMATION**** */ public String globalInfo() { return "This is ARAM."; } /** * Generates the classifier. 
* NOTE(review): buildClassifier below seems to implement an ARAM/fuzzy-ARTMAP
* style training loop (category activation, vigilance test against roa, weight
* update or new-category insertion) — TODO confirm against the upstream source,
* since several loop headers here are visibly truncated.
* * @param D set of instances serving as training data * @exception Exception if the classifier has not been generated * successfully */ public void buildClassifier(Instances D) throws Exception { int L = D.classIndex(); int featlength = (D.numAttributes() -L)*2; int numSamples = D.numInstances(); int classlength = L * 2; if (this.order==null){ order = new ArrayList(); for (int j=0; j= roa) { if (currentCategory == numCategories_1) { if (currentSortedIndex == maxNumCategories) { System.out .println("WARNING: The maximum number of categories has been reached."); resonance = true; } else { // Add a new category sweightsA[currentCategory]=0; int[] s1=data.getKeys(); int sit=data.size(); int count=0; int j=0; for (int jt=0;jt v = new Vector(); v.add(currentCategory); hmclasses.put(s,v); } ARAMm_Add_New_Category(); //System.out.println(numinstances+" "+numCategories); // fprintf(FileID,'Add a new category of %d\n', // network.numCategories); // Increment the number of changes since we added a // new category. numChanges = numChanges + 1; resonance = true; break; } } else { // % Update weights double weightChange = ARAMm_Update_Weights(data, labels, currentCategory); //System.out.println(numinstances+" "+currentCategory+" S:"+sweightsA[currentCategory]); //sumArrayF(this.weightsA[1]); if (weightChange == 1) { numChanges += 1; } resonance = true; break; } } else { currentSortedIndex += 1; resonance = false; } } if(!resonance && currentSortedIndex>=cateacti.length) { // Add a new category sweightsA[numCategories_1]=0; int[] s1=data.getKeys(); int sit=data.size(); int j=0; int count=0; for (int jt=0;jt v = new Vector(); v.add(numCategories_1); hmclasses.put(s,v); } ARAMm_Add_New_Category(); //System.out.println(numinstances+" "+numCategories); // fprintf(FileID,'Add a new category of %d\n', // network.numCategories); // Increment the number of changes since we added a // new category. numChanges = numChanges + 1; } } } //****THIS IS THE CLASSIFICATION ROUTINE. 
// NOTE(review): the next physical line begins with bare text ("MODIFY TO
// CHANGE THE ALGORITHM****") — its '//' comment prefix was lost when the
// original line break inside the comment was removed by extraction.
MODIFY TO CHANGE THE ALGORITHM**** //****classifyInstance() uses this method, so implement the //****nuts-and-bolts of your algorithm here. /** * Calculates the class membership probabilities for the given test * instance. * * @param instance the instance to be classified * @return predicted class probability distribution * @exception Exception if there is a problem generating the prediction */ public double[] distributionForInstance(Instance instance) throws Exception { int num_classes=(int) (snumClasses); double[] ranking = new double[num_classes]; // long startMilli = System.currentTimeMillis(); // for (int j = 0; j < num_features; j++) { // // double dt=instance.value(num_classes+j); // if (dt!=0){ // currentData.put(j, dt); // } // } //TODO use instance here SortPair[] sortedActivations = ARTActivateCategories(instance); java.util.Arrays.sort(sortedActivations); double s0=sortedActivations[0].getValue(); double diff_act = s0 - sortedActivations[numCategories - 2].getValue(); int largest_activ = 1; double activ_change = 0; for (int i = 1; i < sortedActivations.length; i++) { activ_change = (s0 - sortedActivations[i] .getValue()) / s0; if (activ_change > threshold * diff_act) { break; } largest_activ = largest_activ + 1; } // % largest_activ =5; double[] best_matches = new double[largest_activ]; java.util.Arrays.fill(best_matches, 1); best_matches[0]=s0; for (int i = 1; i < largest_activ; i++) { // % best_matches(i) = matches(sortedCategories(i)); best_matches[i] = sortedActivations[i].getValue(); } // % min_mat = min(best_matches); // % max_mat = max(best_matches); double sum_mat = sumArray(best_matches); int currentCategory = 0; this.neuronsactivated=new int[largest_activ]; this.neuronsactivity=new double[largest_activ]; for (int i = 0; i < largest_activ; i++) { this.neuronsactivity[i]=best_matches[i]; best_matches[i] = best_matches[i] / sum_mat; currentCategory = sortedActivations[i].getOriginalIndex(); this.neuronsactivated[i]=currentCategory; // % Fill 
// NOTE(review): the next physical line continues the "// % Fill ..." comment
// above ("return vector with weightB values") as bare text — another lost
// '//' prefix from the same extraction damage.
return vector with weightB values //Set s1= weightsB[currentCategory].keySet(); int[] s1=weightsB[currentCategory].getKeys(); int sit=weightsB[currentCategory].size(); int j=0; for (int jt=0;jt s1=new HashSet(); //Set s1=Data.keySet(); long startMilli = System.currentTimeMillis(); for (int i = 0; i < numCategories-1; i++) { long startMilliss = System.nanoTime(); double sumvector = 0; // double sumweight = 0; int count=0; SparseArray s2=upweightsA[i].clone(); // HashMapIVector s2=new HashMapIVector(upweightsA[i]); //List s2=new ArrayList(weightsA[i].keySet()); //for (Integer j: s1) { //double da=(Double)Data.get(j); long st3 =0; long st4 =0; for (int tj=0; tj entry : Data.entrySet()) { int[] s1=Data.getKeys(); int sit=Data.size(); int j=0; for (int jt=0;jt max) { maxIndex = i; max = dist[i]; } } if (max > 0) { return maxIndex; } else { //return Instance.missingValue(); } case Attribute.NUMERIC: return dist[0]; default: return -1; } } // ****ANY OPTIONS/PARAMETERS GO HERE**** /** * Returns an enumeration describing the available options. * * @return an enumeration of all the available options. */ public Enumeration
// NOTE(review): listOptions() is cut off above at "public Enumeration" — its
// type parameter, parameter list, and body were swallowed, along with the
// opening '/**' of the setOptions javadoc fragment that follows.

* * -K
* Use kernel estimation for modelling numeric attributes rather than * a single normal distribution.

* * -D
* Use supervised discretization to process numeric attributes. * * @param options the list of options as an array of strings * @exception Exception if an option is not supported */ public void setOptions(String[] options) throws Exception { //These are just examples, modify to suit your algorithm // boolean k = Utils.getFlag('K', options); // boolean d = Utils.getFlag('D', options); // if (k && d) { // throw new IllegalArgumentException( // "Can't use both kernel density estimation and discretization!"); // } // setUseSupervisedDiscretization(d); // setUseKernelEstimator(k); roa = (Utils.getOptionPos("P",options) >= 0) ? Double.parseDouble(Utils.getOption("P", options)) : roa; m_userankstoclass= (Utils.getOptionPos("K",options) >= 0); super.setOptions(options); } //****MORE OPTION PARSING STUFF**** /** * Gets the current settings of the classifier. * * @return an array of strings suitable for passing to setOptions */ public String [] getOptions() { //These are just examples, modify to suit your algorithm String [] options = new String [3]; try{ options =weka.core.Utils.splitOptions("-P 0.9 -K"); }catch (Exception ex) { System.out.println(ex.getMessage()); } return options; } //****ANY INFORMATION LIKE NO. OF UNITS ETC PRINTED HERE /** * Returns a description of the classifier. * * @return a description of the classifier as a string. */ public String toString() { //These are just examples, modify to suit your algorithm StringBuffer text = new StringBuffer(); text.append("ML ARAM classifier"); // if (m_Instances == null) { // text.append(": No model built yet."); // } else { // try { // for (int i = 0; i < m_Distributions[0].length; i++) { // text.append("\n\nClass " + m_Instances.classAttribute().value(i) + // ": Prior probability = " + Utils. 
// doubleToString(m_ClassDistribution.getProbability(i), // 4, 2) + "\n\n"); // Enumeration enumAtts = m_Instances.enumerateAttributes(); // int attIndex = 0; // while (enumAtts.hasMoreElements()) { // Attribute attribute = (Attribute) enumAtts.nextElement(); // text.append(attribute.name() + ": " // + m_Distributions[attIndex][i]); // attIndex++; // } // } // } catch (Exception ex) { // text.append(ex.getMessage()); // } // } return text.toString(); } /** * Main method for testing this class. * * @param argv the options */ private double ARAMm_Update_Weights(SparseArray data, SparseArray labels, int category) { double weightChange = 0; sweightsA[category]=0; //Set s1=data.keySet(); SparseArray s2=weightsA[category].clone(); int count=0; int[] s1=data.getKeys(); int sit=data.size(); int i=0; for (int jt=0;jt { private int originalIndex; private double value; private double rawvalue; public SortPair2(double value, int originalIndex, double rawvalue) { this.value = value; this.originalIndex = originalIndex; this.rawvalue = rawvalue; } public int compareTo(SortPair2 o) { return Double.compare(o.getValue(), value); } public int getOriginalIndex() { return originalIndex; } public double getValue() { return value; } public double getRawValue() { return rawvalue; } } @Override public boolean isThreaded() { // TODO Auto-generated method stub return false; } @Override public void setThreaded(boolean setv) { // TODO Auto-generated method stub } @Override public String getModel() { // TODO Auto-generated method stub return null; } }





© 2015 - 2025 Weber Informatics LLC | Privacy Policy