weka.classifiers.neural.multilayerperceptron.BackPropagation Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of wekaclassalgos Show documentation
Show all versions of wekaclassalgos Show documentation
Fork of the following defunct sourceforge.net project: https://sourceforge.net/projects/wekaclassalgos/
The newest version!
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
package weka.classifiers.neural.multilayerperceptron;
import weka.classifiers.neural.common.NeuralModel;
import weka.classifiers.neural.common.SimpleNeuron;
import weka.classifiers.neural.common.WekaAlgorithmAncestor;
import weka.classifiers.neural.common.learning.LearningKernelFactory;
import weka.classifiers.neural.common.learning.LearningRateKernel;
import weka.classifiers.neural.common.training.TrainerFactory;
import weka.classifiers.neural.common.transfer.TransferFunction;
import weka.classifiers.neural.common.transfer.TransferFunctionFactory;
import weka.classifiers.neural.multilayerperceptron.algorithm.BackPropagationAlgorithm;
import weka.core.Instances;
import weka.core.Option;
import weka.core.SelectedTag;
import weka.core.Utils;
import java.util.ArrayList;
import java.util.Collection;
/**
* Title: Weka Neural Implementation
* Description: ...
* Copyright: Copyright (c) 2003
* Company: N/A
*
* @author Jason Brownlee
* @version 1.0
*/
public class BackPropagation extends WekaAlgorithmAncestor {
// Indices into the parallel arrays below (EXTRA_PARAMETERS,
// EXTRA_PARAMETER_NOTES, EXTRA_PARAM_DESCRIPTIONS): one slot per
// command-line option this classifier defines.
public final static int PARAM_TRANSFER_FUNCTION = 0;
public final static int PARAM_TRAINING_MODE = 1;
public final static int PARAM_MOMENTUM = 2;
public final static int PARAM_WEIGHT_DECAY = 3;
public final static int PARAM_HIDDEN_1 = 4;
public final static int PARAM_HIDDEN_2 = 5;
public final static int PARAM_HIDDEN_3 = 6;
public final static int PARAM_LEARNING_RATE_FUNCTION = 7;
// param flags
// Single-letter command-line flag for each option, indexed by the
// PARAM_* constants above (used by getAlgorithmOptions as "-F", "-N", ...).
public final static String[] EXTRA_PARAMETERS =
{
"F", // transfer function
"N", // training mode
"A", // momentum
"D", // weight decay
"X", // hidden layer 1 num nodes
"Y", // hidden layer 2 num nodes
"Z", // hidden layer 3 num nodes
"M" // learning rate function
};
// Per-option note text, indexed by the PARAM_* constants.
// All entries are currently empty; presumably consumed by the
// WekaAlgorithmAncestor option machinery — TODO confirm against ancestor.
public final static String[] EXTRA_PARAMETER_NOTES =
{
"", // transfer function
"", // training mode
"", // momentum
"", // weight decay
"", // hidden layer 1 num nodes
"", // hidden layer 2 num nodes
"", // hidden layer 3 num nodes
"" // learning rate function
};
// descriptions for all parameters
// Human-readable help text for each option, indexed by the PARAM_*
// constants; factory DESCRIPTION strings list the accepted codes.
public final static String[] EXTRA_PARAM_DESCRIPTIONS =
{
"Neuron transfer function " + TransferFunctionFactory.DESCRIPTION,
"Model training algorithm " + TrainerFactory.DESCRIPTION,
"Momentum Factor (recommend between 0.0 and 0.9, 0.0==not used)",
"Weight Decay Factor (recommend between 0.0 and 1.0, 0.0==not used)",
"The number of nodes in the first hidden layer (0 for none)",
"The number of nodes in the second hidden layer (0 for none)",
"The number of nodes in the third hidden layer (0 for none)",
"Learning rate function to use while training, static is typically better " + LearningKernelFactory.DESCRIPTION
};
// momentum
// Momentum factor applied during weight updates (0.0 disables it).
protected double momentum = 0.0;
// weight decay
// Weight-decay factor applied during weight updates (0.0 disables it).
protected double weightDecay = 0.0;
// topology
// Node counts for up to three hidden layers; 0 means the layer (and any
// layer after it) is absent — see prepareAlgorithm's cascading check.
protected int hiddenLayer1 = 0;
protected int hiddenLayer2 = 0;
protected int hiddenLayer3 = 0;
/**
 * Constructs a back-propagation classifier with sensible defaults:
 * sigmoid transfer function, batch training for 500 iterations, a
 * static learning rate of 0.1, momentum of 0.2, no weight decay and
 * no hidden layers (a single-layer perceptron topology).
 */
public BackPropagation() {
  // inherited training configuration
  this.transferFunction = TransferFunctionFactory.TRANSFER_SIGMOID;
  this.trainingMode = TrainerFactory.TRAINER_BATCH;
  this.trainingIterations = 500;
  this.biasInput = SimpleNeuron.DEFAULT_BIAS_VALUE;
  this.learningRate = 0.1;
  this.learningRateFunction = LearningKernelFactory.LEARNING_FUNCTION_STATIC;
  this.randomNumberSeed = 0;
  // back-propagation specific settings
  this.momentum = 0.2;
  this.weightDecay = 0.0;
  // network topology: all hidden layers disabled by default
  this.hiddenLayer1 = 0;
  this.hiddenLayer2 = 0;
  this.hiddenLayer3 = 0;
}
/**
 * Builds the configured back-propagation model for the given training data.
 *
 * @param instances the training instances the network is shaped around
 * @return a ready-to-train {@link BackPropagationAlgorithm}
 * @throws java.lang.Exception if a factory rejects the configured
 *                             transfer or learning-rate function code
 */
protected NeuralModel prepareAlgorithm(Instances instances) throws java.lang.Exception {
  // resolve the user-selected neuron activation function
  TransferFunction transfer = TransferFunctionFactory.factory(transferFunction);
  // resolve the learning-rate schedule over the training run
  LearningRateKernel kernel = LearningKernelFactory.factory(learningRateFunction, learningRate, trainingIterations);
  // A hidden layer only counts when its node count is positive AND every
  // earlier layer is present: a gap (e.g. layer 2 empty, layer 3 set)
  // truncates the topology at the gap.
  int numLayers = 0;
  if (hiddenLayer1 > 0) {
    numLayers = 1;
    if (hiddenLayer2 > 0) {
      numLayers = 2;
      if (hiddenLayer3 > 0) {
        numLayers = 3;
      }
    }
  }
  int[] topology;
  switch (numLayers) {
    case 1:
      topology = new int[]{hiddenLayer1};
      break;
    case 2:
      topology = new int[]{hiddenLayer1, hiddenLayer2};
      break;
    case 3:
      topology = new int[]{hiddenLayer1, hiddenLayer2, hiddenLayer3};
      break;
    default:
      // no hidden layers at all
      topology = null;
      break;
  }
  return new BackPropagationAlgorithm(transfer, rand, kernel, momentum, weightDecay, biasInput, topology, instances);
}
/**
 * Returns this algorithm's current configuration as a flat list of
 * command-line flag/value pairs ("-F", "3", "-N", "1", ...), one pair
 * per PARAM_* option, using the flag letters from EXTRA_PARAMETERS.
 *
 * @return the option strings, flag followed by its value
 */
protected Collection getAlgorithmOptions() {
  // typed list instead of the raw type; the 8 identical add-pairs are
  // factored into addOption to keep flag and value together
  ArrayList<String> options = new ArrayList<String>();
  addOption(options, PARAM_TRANSFER_FUNCTION, Integer.toString(transferFunction));
  addOption(options, PARAM_TRAINING_MODE, Integer.toString(trainingMode));
  addOption(options, PARAM_MOMENTUM, Double.toString(momentum));
  addOption(options, PARAM_WEIGHT_DECAY, Double.toString(weightDecay));
  addOption(options, PARAM_HIDDEN_1, Integer.toString(hiddenLayer1));
  addOption(options, PARAM_HIDDEN_2, Integer.toString(hiddenLayer2));
  addOption(options, PARAM_HIDDEN_3, Integer.toString(hiddenLayer3));
  addOption(options, PARAM_LEARNING_RATE_FUNCTION, Integer.toString(learningRateFunction));
  return options;
}

/**
 * Appends one "-flag value" pair to the option list.
 *
 * @param options the list being assembled
 * @param param   index into EXTRA_PARAMETERS for the flag letter
 * @param value   string form of the option's current value
 */
private static void addOption(ArrayList<String> options, int param, String value) {
  options.add("-" + EXTRA_PARAMETERS[param]);
  options.add(value);
}
protected Collection © 2015 - 2025 Weber Informatics LLC | Privacy Policy