
org.nd4j.linalg.learning.regularization.L2Regularization

package org.nd4j.linalg.learning.regularization;

import lombok.Data;
import lombok.NonNull;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.pairwise.arithmetic.Axpy;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.schedule.FixedSchedule;
import org.nd4j.linalg.schedule.ISchedule;
import org.nd4j.shade.jackson.annotation.JsonProperty;

/**
 * L2 regularization: very similar to {@link WeightDecay}, but applied before the updater is applied, not after.
 * <br>
 * Implements updating as follows:<br>
 * {@code L = loss + l2 * 0.5 * sum_i w[i]^2}<br>
 * {@code w[i] -= updater(gradient[i] + l2 * w[i])}<br>
 * That is, the L2 regularization term is added to the gradient <i>before</i> the updater (Adam/Nesterov/etc.) is
 * applied. This differs from {@link WeightDecay} mainly in that WeightDecay is applied after the updater.
 * <br>
 * See also: {@link WeightDecay}, which should generally be preferred in practice.<br>
 * See <a href="https://www.fast.ai/2018/07/02/adam-weight-decay/">https://www.fast.ai/2018/07/02/adam-weight-decay/</a>
 * for further details.
 *
 * @author Alex Black
 */
@Data
public class L2Regularization implements Regularization {

    protected final ISchedule l2;

    /**
     * @param l2 L2 regularization coefficient
     */
    public L2Regularization(double l2) {
        this(new FixedSchedule(l2));
    }

    /**
     * @param l2 L2 regularization coefficient (schedule)
     */
    public L2Regularization(@JsonProperty("l2") @NonNull ISchedule l2) {
        this.l2 = l2;
    }

    @Override
    public ApplyStep applyStep() {
        return ApplyStep.BEFORE_UPDATER;
    }

    @Override
    public void apply(INDArray param, INDArray gradView, double lr, int iteration, int epoch) {
        //L = loss + l2 * 0.5 * sum_i w[i]^2
        //dL/dw[i] = dloss/dw[i] + l2 * w[i]
        double coeff = l2.valueAt(iteration, epoch);
        Nd4j.exec(new Axpy(param, gradView, gradView, coeff));    //gradView = coeff * param + gradView
    }

    @Override
    public double score(INDArray param, int iteration, int epoch) {
        //Score: L = l2 * 0.5 * sum_i w[i]^2
        double norm2 = param.norm2Number().doubleValue();    //norm2(w) = sqrt(sum_i w[i]^2)
        return l2.valueAt(iteration, epoch) * 0.5 * norm2 * norm2;
    }

    @Override
    public Regularization clone() {
        return new L2Regularization(l2.clone());
    }
}
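
Below is a minimal standalone sketch of how this class can be exercised directly, outside a full training loop. The example class name (L2RegularizationExample) and the toy 3x3 arrays are illustrative assumptions, not part of the library; only the L2Regularization / Regularization API shown in the source above is relied upon.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.regularization.L2Regularization;
import org.nd4j.linalg.learning.regularization.Regularization;

public class L2RegularizationExample {      //Hypothetical example class, not part of nd4j
    public static void main(String[] args) {
        double l2 = 0.01;
        INDArray param = Nd4j.rand(3, 3);               //Current parameters w (toy values)
        INDArray grad = Nd4j.rand(3, 3);                //Loss gradient dL/dw (toy values)
        INDArray expected = grad.add(param.mul(l2));    //Manually computed: gradient + l2 * w

        Regularization reg = new L2Regularization(l2);

        //L2 is a BEFORE_UPDATER regularization: it modifies the gradient
        //before the gradient is handed to Adam/Nesterov/etc.
        if (reg.applyStep() == Regularization.ApplyStep.BEFORE_UPDATER) {
            reg.apply(param, grad, 0.1, 0, 0);          //Mutates grad in place: grad += l2 * param
        }

        System.out.println("Matches manual calculation: " + expected.equalsWithEps(grad, 1e-6));

        //score(...) returns the loss term added by the regularizer: l2 * 0.5 * sum_i w[i]^2
        double penalty = reg.score(param, 0, 0);
        System.out.println("L2 penalty term: " + penalty);
    }
}

Note that apply(...) mutates gradView in place via the Axpy op (gradView = coeff * param + gradView), which is why the manually expected value is computed before the call.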



