
org.ddogleg.optimization.derivative.NumericalJacobianFB


DDogleg Numerics is a high performance Java library for non-linear optimization, robust model fitting, polynomial root finding, sorting, and more.

/*
 * Copyright (c) 2012-2018, Peter Abeles. All Rights Reserved.
 *
 * This file is part of DDogleg (http://ddogleg.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ddogleg.optimization.derivative;

import org.ddogleg.optimization.functions.FunctionNtoM;
import org.ddogleg.optimization.functions.FunctionNtoMxN;
import org.ejml.UtilEjml;
import org.ejml.data.DMatrixRMaj;

/**
 * Finite difference numerical Jacobian calculation using the forward+backwards equation.
 * Difference equation, f'(x) = (f(x+h)-f(x-h))/(2*h).  Scaling is taken into account by h based
 * upon the magnitude of the elements in variable x.
 *
 * <p>
 * NOTE: If multiple input parameters are modified by the function when a single one is changed,
 * numerical derivatives aren't reliable.
 * </p>
 *
 * @author Peter Abeles
 */
public class NumericalJacobianFB implements FunctionNtoMxN<DMatrixRMaj> {

	// number of input variables
	private final int N;
	// number of functions
	private final int M;

	// function being differentiated
	private FunctionNtoM function;

	// scaling of the difference parameter
	private double differenceScale;

	private double[] output0;
	private double[] output1;

	public NumericalJacobianFB(FunctionNtoM function, double differenceScale) {
		this.function = function;
		this.differenceScale = differenceScale;

		this.N = function.getNumOfInputsN();
		this.M = function.getNumOfOutputsM();

		output0 = new double[M];
		output1 = new double[M];
	}

	public NumericalJacobianFB(FunctionNtoM function) {
		this(function, Math.sqrt(UtilEjml.EPS));
	}

	@Override
	public int getNumOfInputsN() {
		return N;
	}

	@Override
	public int getNumOfOutputsM() {
		return M;
	}

	@Override
	public void process(double[] input, DMatrixRMaj jacobian) {
		DMatrixRMaj J = jacobian;

		for (int i = 0; i < N; i++) {
			double x = input[i];
			// scale the step size by the magnitude of x so large and small variables
			// are perturbed proportionally
			double h = x != 0 ? differenceScale * Math.abs(x) : differenceScale;

			// backwards sample
			double temp0 = x - h;
			input[i] = temp0;
			// recompute the step actually taken to account for floating-point rounding in x-h
			double h0 = x - temp0;
			function.process(input, output0);

			// forwards sample
			double temp1 = x + h;
			double h1 = temp1 - x;
			input[i] = temp1;
			function.process(input, output1);

			// central difference fills column i of the Jacobian
			for (int j = 0; j < M; j++) {
				J.unsafe_set(j, i, (output1[j] - output0[j]) / (h0 + h1));
			}

			// restore the original value of the variable
			input[i] = x;
		}
	}

	@Override
	public DMatrixRMaj declareMatrixMxN() {
		return new DMatrixRMaj(M, N);
	}
}
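A minimal usage sketch, not part of the original file: it assumes an illustrative two-input, two-output function f(x,y) = [x*y, x + y^2] and computes its Jacobian numerically with the class above. The class name NumericalJacobianExample and the test point (2, 3) are made up for the example; the DDogleg calls (FunctionNtoM, process, declareMatrixMxN) are taken from the source shown here.

import org.ddogleg.optimization.derivative.NumericalJacobianFB;
import org.ddogleg.optimization.functions.FunctionNtoM;
import org.ejml.data.DMatrixRMaj;

public class NumericalJacobianExample {
	public static void main(String[] args) {
		// f(x,y) = [ x*y , x + y^2 ]
		FunctionNtoM f = new FunctionNtoM() {
			@Override public int getNumOfInputsN() { return 2; }
			@Override public int getNumOfOutputsM() { return 2; }
			@Override public void process(double[] input, double[] output) {
				double x = input[0], y = input[1];
				output[0] = x * y;       // df0/dx = y , df0/dy = x
				output[1] = x + y * y;   // df1/dx = 1 , df1/dy = 2*y
			}
		};

		// default constructor uses a step scale of sqrt(UtilEjml.EPS)
		NumericalJacobianFB jacobian = new NumericalJacobianFB(f);
		DMatrixRMaj J = jacobian.declareMatrixMxN();

		double[] point = {2.0, 3.0};
		jacobian.process(point, J);

		// analytic Jacobian at (2,3) is [ 3 2 ; 1 6 ]; the numerical result should be close
		J.print();
	}
}

Note that process() temporarily perturbs the entries of the input array and restores them afterwards, and that the one-argument constructor uses Math.sqrt(UtilEjml.EPS) as the default differenceScale.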



