weka.filters.unsupervised.attribute.ClusterMembership Maven / Gradle / Ivy

The Waikato Environment for Knowledge Analysis (WEKA), a machine learning workbench. This is the stable version. Apart from bugfixes, this version does not receive any other updates.

There is a newer version: 3.8.6
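Before the source listing, a minimal usage sketch. It is not part of the artifact: the file name iris.arff, the choice of weka.clusterers.EM, and the ignored attribute range are placeholders, and the snippet assumes weka-stable is on the classpath. It drives the filter through the -W and -I options documented in the class Javadoc further down.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.ClusterMembership;

public class ClusterMembershipExample {
  public static void main(String[] args) throws Exception {
    // Load a dataset (placeholder file name) and pick a nominal class.
    Instances data = DataSource.read("iris.arff");
    data.setClassIndex(data.numAttributes() - 1);

    ClusterMembership filter = new ClusterMembership();
    // -W selects the density-based clusterer, -I lists attributes to ignore
    // (both options are documented in the class Javadoc below).
    filter.setOptions(new String[] { "-W", "weka.clusterers.EM", "-I", "1" });
    filter.setInputFormat(data);

    // The output holds pCluster_<class>_<cluster> membership values
    // plus the original class attribute.
    Instances clusterMemberships = Filter.useFilter(data, filter);
    System.out.println(clusterMemberships.toSummaryString());
  }
}

With a nominal class set, one copy of the chosen clusterer is trained per class value, which is why the output attributes are indexed by class as well as cluster, with the class attribute appended last.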
/*
 *   This program is free software: you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, either version 3 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

/*
 *    ClusterMembership.java
 *    Copyright (C) 2004-2012 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.filters.unsupervised.attribute;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Vector;

import weka.clusterers.AbstractDensityBasedClusterer;
import weka.clusterers.DensityBasedClusterer;
import weka.core.*;
import weka.filters.Filter;
import weka.filters.UnsupervisedFilter;

/**
 * A filter that uses a density-based clusterer to generate cluster membership
 * values; filtered instances are composed of these values plus the class
 * attribute (if set in the input data). If a (nominal) class attribute is set,
 * the clusterer is run separately for each class. The class attribute (if set)
 * and any user-specified attributes are ignored during the clustering
 * operation.
 *
 * Valid options are:
 *
 * -W <clusterer name>
 *  Full name of clusterer to use. eg:
 *   weka.clusterers.EM
 *  Additional options after the '--'.
 *  (default: weka.clusterers.EM)
 *
 * -I <att1,att2-att4,...>
 *  The range of attributes the clusterer should ignore.
 *  (the class attribute is automatically ignored)
 *
 * Options after the -- are passed on to the clusterer.
 *
 * @author Mark Hall ([email protected])
 * @author Eibe Frank
 * @version $Revision: 14534 $
 */
public class ClusterMembership extends Filter implements UnsupervisedFilter,
  OptionHandler, WeightedInstancesHandler, WeightedAttributesHandler {

  /** for serialization */
  static final long serialVersionUID = 6675702504667714026L;

  /** The clusterer */
  protected DensityBasedClusterer m_clusterer = new weka.clusterers.EM();

  /** Array for storing the clusterers */
  protected DensityBasedClusterer[] m_clusterers;

  /** Range of attributes to ignore */
  protected Range m_ignoreAttributesRange;

  /** Filter for removing attributes */
  protected Filter m_removeAttributes;

  /** The prior probability for each class */
  protected double[] m_priors;

  /**
   * Returns the Capabilities of this filter.
   *
   * @return the capabilities of this object
   * @see Capabilities
   */
  @Override
  public Capabilities getCapabilities() {
    Capabilities result = m_clusterer.getCapabilities();
    result.enableAllClasses();
    result.setMinimumNumberInstances(0);

    return result;
  }

  /**
   * Returns the Capabilities of this filter, makes sure that the class is
   * never set (for the clusterer).
   *
   * @param data the data to use for customization
   * @return the capabilities of this object, based on the data
   * @see #getCapabilities()
   */
  @Override
  public Capabilities getCapabilities(Instances data) {
    Instances newData;

    newData = new Instances(data, 0);
    newData.setClassIndex(-1);

    return super.getCapabilities(newData);
  }

  /**
   * tests the data whether the filter can actually handle it
   *
   * @param instanceInfo the data to test
   * @throws Exception if the test fails
   */
  @Override
  protected void testInputFormat(Instances instanceInfo) throws Exception {
    getCapabilities(instanceInfo).testWithFail(removeIgnored(instanceInfo));
  }

  /**
   * Sets the format of the input instances.
   *
   * @param instanceInfo an Instances object containing the input instance
   *          structure (any instances contained in the object are ignored -
   *          only the structure is required).
   * @return true if the outputFormat may be collected immediately
   * @throws Exception if the inputFormat can't be set successfully
   */
  @Override
  public boolean setInputFormat(Instances instanceInfo) throws Exception {
    super.setInputFormat(instanceInfo);
    m_removeAttributes = null;
    m_priors = null;

    return false;
  }

  /**
   * filters all attributes that should be ignored
   *
   * @param data the data to filter
   * @return the filtered data
   * @throws Exception if filtering fails
   */
  protected Instances removeIgnored(Instances data) throws Exception {
    Instances result = data;

    if (m_ignoreAttributesRange != null || data.classIndex() >= 0) {
      result = new Instances(data);
      m_removeAttributes = new Remove();
      String rangeString = "";
      if (m_ignoreAttributesRange != null) {
        rangeString += m_ignoreAttributesRange.getRanges();
      }
      if (data.classIndex() >= 0) {
        if (rangeString.length() > 0) {
          rangeString += "," + (data.classIndex() + 1);
        } else {
          rangeString = "" + (data.classIndex() + 1);
        }
      }
      ((Remove) m_removeAttributes).setAttributeIndices(rangeString);
      ((Remove) m_removeAttributes).setInvertSelection(false);
      m_removeAttributes.setInputFormat(data);
      result = Filter.useFilter(data, m_removeAttributes);
    }

    return result;
  }

  /**
   * Signify that this batch of input to the filter is finished.
   *
   * @return true if there are instances pending output
   * @throws IllegalStateException if no input structure has been defined
   */
  @Override
  public boolean batchFinished() throws Exception {

    if (getInputFormat() == null) {
      throw new IllegalStateException("No input instance format defined");
    }

    if (outputFormatPeek() == null) {
      Instances toFilter = getInputFormat();
      Instances[] toFilterIgnoringAttributes;

      // Make subsets if class is nominal
      if ((toFilter.classIndex() >= 0) && toFilter.classAttribute().isNominal()) {
        toFilterIgnoringAttributes = new Instances[toFilter.numClasses()];
        for (int i = 0; i < toFilter.numClasses(); i++) {
          toFilterIgnoringAttributes[i] = new Instances(toFilter,
            toFilter.numInstances());
        }
        for (int i = 0; i < toFilter.numInstances(); i++) {
          toFilterIgnoringAttributes[(int) toFilter.instance(i).classValue()]
            .add(toFilter.instance(i));
        }
        m_priors = new double[toFilter.numClasses()];
        for (int i = 0; i < toFilter.numClasses(); i++) {
          toFilterIgnoringAttributes[i].compactify();
          m_priors[i] = toFilterIgnoringAttributes[i].sumOfWeights();
        }
        Utils.normalize(m_priors);
      } else {
        toFilterIgnoringAttributes = new Instances[1];
        toFilterIgnoringAttributes[0] = toFilter;
        m_priors = new double[1];
        m_priors[0] = 1;
      }

      // filter out attributes if necessary
      for (int i = 0; i < toFilterIgnoringAttributes.length; i++) {
        toFilterIgnoringAttributes[i] = removeIgnored(toFilterIgnoringAttributes[i]);
      }

      // build the clusterers
      if ((toFilter.classIndex() <= 0) || !toFilter.classAttribute().isNominal()) {
        m_clusterers = AbstractDensityBasedClusterer.makeCopies(m_clusterer, 1);
        m_clusterers[0].buildClusterer(toFilterIgnoringAttributes[0]);
      } else {
        m_clusterers = AbstractDensityBasedClusterer.makeCopies(m_clusterer,
          toFilter.numClasses());
        for (int i = 0; i < m_clusterers.length; i++) {
          if (toFilterIgnoringAttributes[i].numInstances() == 0) {
            m_clusterers[i] = null;
          } else {
            m_clusterers[i].buildClusterer(toFilterIgnoringAttributes[i]);
          }
        }
      }

      // create output dataset
      ArrayList<Attribute> attInfo = new ArrayList<Attribute>();
      for (int j = 0; j < m_clusterers.length; j++) {
        if (m_clusterers[j] != null) {
          for (int i = 0; i < m_clusterers[j].numberOfClusters(); i++) {
            attInfo.add(new Attribute("pCluster_" + j + "_" + i));
          }
        }
      }
      if (toFilter.classIndex() >= 0) {
        attInfo.add((Attribute) toFilter.classAttribute().copy());
      }
      attInfo.trimToSize();
      Instances filtered = new Instances(toFilter.relationName()
        + "_clusterMembership", attInfo, 0);
      if (toFilter.classIndex() >= 0) {
        filtered.setClassIndex(filtered.numAttributes() - 1);
      }
      setOutputFormat(filtered);

      // build new dataset
      for (int i = 0; i < toFilter.numInstances(); i++) {
        convertInstance(toFilter.instance(i));
      }
    }
    flushInput();

    m_NewBatch = true;
    return (numPendingOutput() != 0);
  }

  /**
   * Input an instance for filtering. Ordinarily the instance is processed and
   * made available for output immediately. Some filters require all instances
   * be read before producing output.
   *
   * @param instance the input instance
   * @return true if the filtered instance may now be collected with output().
   * @throws IllegalStateException if no input format has been defined.
   */
  @Override
  public boolean input(Instance instance) throws Exception {

    if (getInputFormat() == null) {
      throw new IllegalStateException("No input instance format defined");
    }

    if (m_NewBatch) {
      resetQueue();
      m_NewBatch = false;
    }

    if (outputFormatPeek() != null) {
      convertInstance(instance);
      return true;
    }

    bufferInput(instance);
    return false;
  }

  /**
   * Converts logs back to density values.
   *
   * @param j the index of the clusterer
   * @param in the instance to convert the logs back
   * @return the densities
   * @throws Exception if something goes wrong
   */
  protected double[] logs2densities(int j, Instance in) throws Exception {
    double[] logs = m_clusterers[j].logJointDensitiesForInstance(in);

    for (int i = 0; i < logs.length; i++) {
      logs[i] += Math.log(m_priors[j]);
    }
    return logs;
  }

  /**
   * Convert a single instance over. The converted instance is added to the end
   * of the output queue.
   *
   * @param instance the instance to convert
   * @throws Exception if something goes wrong
   */
  protected void convertInstance(Instance instance) throws Exception {

    // set up values
    double[] instanceVals = new double[outputFormatPeek().numAttributes()];
    double[] tempvals;
    if (instance.classIndex() >= 0) {
      tempvals = new double[outputFormatPeek().numAttributes() - 1];
    } else {
      tempvals = new double[outputFormatPeek().numAttributes()];
    }
    int pos = 0;
    for (int j = 0; j < m_clusterers.length; j++) {
      if (m_clusterers[j] != null) {
        double[] probs;
        if (m_removeAttributes != null) {
          m_removeAttributes.input(instance);
          probs = logs2densities(j, m_removeAttributes.output());
        } else {
          probs = logs2densities(j, instance);
        }
        System.arraycopy(probs, 0, tempvals, pos, probs.length);
        pos += probs.length;
      }
    }
    tempvals = Utils.logs2probs(tempvals);
    System.arraycopy(tempvals, 0, instanceVals, 0, tempvals.length);
    if (instance.classIndex() >= 0) {
      instanceVals[instanceVals.length - 1] = instance.classValue();
    }

    push(new DenseInstance(instance.weight(), instanceVals));
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  @Override
  public Enumeration<Option> listOptions() {
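As convertInstance() above shows, each output row is built by concatenating the per-clusterer values from logs2densities() — log joint densities shifted by the log of the class prior — and normalizing them with Utils.logs2probs. A minimal, self-contained sketch of that final step, with made-up numbers standing in for the log densities:

import java.util.Arrays;
import weka.core.Utils;

public class Logs2ProbsSketch {
  public static void main(String[] args) {
    // Hypothetical log joint densities for two clusters of class 0 and two
    // clusters of class 1, each already shifted by the log of its class
    // prior, as logs2densities() does before convertInstance() normalizes.
    double[] logJoint = { -1.2, -2.3, -0.7, -3.1 };

    // Utils.logs2probs exponentiates in a numerically stable way and
    // normalizes, so the resulting pCluster_* values sum to one.
    double[] probs = Utils.logs2probs(logJoint);
    System.out.println(Arrays.toString(probs));
  }
}

Because the normalization runs over the concatenated vector, the memberships of a single output instance sum to one across all classes and clusters, not within each class block.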



