package com.meliorbis.numerics.fixedpoint;
import static org.apache.commons.math3.linear.MatrixUtils.createRealMatrix;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.IntStream;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.util.Precision;
import com.meliorbis.numerics.DoubleArrayFactories;
import com.meliorbis.numerics.NumericsException;
import com.meliorbis.numerics.function.MultiVariateVectorFunction;
import com.meliorbis.numerics.generic.primitives.DoubleArray;
/**
* A class that finds fixed points in multivariate functions using Newton's Method.
*
* In each iteration, each variable is separately perturbed by a small delta to numerically
* approximate the partial derivatives with respect to that variable. Once the complete Jacobian
* has been constructed in this way, the Newton adjustment is computed by solving the resulting
* linear system.
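*
* A minimal usage sketch (assuming {@code MultiVariateVectorFunction} can be
* implemented as a lambda with its {@code call} method, as it is used below):
* <pre>{@code
* MultiVariateNewtonFixedPointFinder finder =
*         new MultiVariateNewtonFixedPointFinder(1e-8, 1e-6);
* // f(x, y) = (cos(y)/2, sin(x)/2) is a contraction, so the iteration converges
* double[] fixedPoint = finder.findFixedPoint(
*         inputs -> new Double[] {Math.cos(inputs[1]) / 2, Math.sin(inputs[0]) / 2},
*         new double[] {0.5, 0.5});
* }</pre>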
*/
public class MultiVariateNewtonFixedPointFinder implements FixedPointFinder
{
private static final Logger LOG = Logger.getLogger(MultiVariateNewtonFixedPointFinder.class.getName());
private final double _targetDistance;
private final double _adjust;
private double _damping = 1d;
/**
* @param targetError_ The target precision (distance between assumed and applied). Once the error
* has become this small, the algorithm terminates.
* @param gradDelta_ The delta to use for numeric gradient calculations. This is specified as a
* proportion of the variable being adjusted.
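* For example, with {@code gradDelta_ = 1e-6} a variable currently at 2.0 is
* perturbed by 2e-6; an absolute delta of 1e-6 is used instead when the
* variable is smaller than 1e-3 in magnitude.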
*/
public MultiVariateNewtonFixedPointFinder(
double targetError_,
double gradDelta_)
{
_targetDistance = targetError_;
_adjust = gradDelta_;
}
/**
* Sets the damping factor to be applied when the values are adjusted. If Newton's method
* determines an adjustment vector {@code v}, then the actual (additive) adjustment applied to
* the current point will be {@code damping_ * v}.
*
* Defaults to 1.
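*
* For example, with a damping factor of 0.5 a Newton adjustment of
* {@code (0.2, -0.4)} moves the current point by {@code (0.1, -0.2)}.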
*
* @param damping_ The damping factor
*/
public void setDamping(double damping_)
{
_damping = damping_;
}
@Override
public double[] findFixedPoint(MultiVariateVectorFunction function_, double[] initialVals_)
{
double distance;
final RealMatrix jacobian = createRealMatrix(initialVals_.length, initialVals_.length);
// Boxed Doubles are required because that is what the function API accepts
Double[] currentInputs = ArrayUtils.toObject(initialVals_);
// Tracks which partial derivative is being calculated: -1 means the base
// point itself is being evaluated, 0..n-1 the corresponding Jacobian column
int currentGradIndex = -1;
Double[] lastImplied = null;
Double[] lastInputs = null;
double[] lastErrorByAdjustedVar = new double[initialVals_.length];
// Makes it easier to switch to relatively sized adjustments
Double lastDelta = _adjust;
double[] lastJump = null;
int backTrack = 0;
double lastError = Double.POSITIVE_INFINITY;
double adjustFactor = 1d;
do
{
try
{
Double[] implied = function_.call(currentInputs);
distance = evaluateMetric(currentInputs, implied);
// Checks that the error is a number, otherwise throws NumericsException
validateDistance(distance, currentInputs, implied);
LOG.info(String.format("Current distance: %s", distance));
// Solution found!
if (distance < _targetDistance)
{
return ArrayUtils.toPrimitive(currentInputs);
}
// Are we in the process of calculating a grad?
if (currentGradIndex >= 0)
{
// Remember the error resulting from adjusting this variable
lastErrorByAdjustedVar[currentGradIndex] = distance;
double[] column = null;
try
{
column = calculatePartialDiffs(lastImplied, implied, lastDelta);
} catch (NumericsException e)
{
// Fail once the delta has been doubled ten times (adjust factor 1024)
if(adjustFactor == 1024)
{
// Reset the adjust factor in case of retries
adjustFactor = 1;
throw new NumericsException(
String.format("Variable %s does not affect outcome",
currentGradIndex));
}
LOG.info(
String.format("Variable %s change had no impact, increasing delta ",
currentGradIndex));
// Remove the grad delta from the variable again
currentInputs[currentGradIndex] -= lastDelta;
// Use a bigger delta, repeat
adjustFactor *= 2;
LOG.info(String.format("Adjust Factor: %s ", adjustFactor));
}
// In case of success
if(column != null)
{
adjustFactor = 1;
// Subtract 1 for the one input that was adjusted: its own change
// equals the delta, and since the column was already divided by the
// delta above, that contribution is exactly 1
column[currentGradIndex] -= 1;
// Store the column
jacobian.setColumn(currentGradIndex, column);
// Remove the grad delta from the variable again
currentInputs[currentGradIndex] -= lastDelta;
// Move to the next dimension
currentGradIndex++;
}
}
// Did we just complete the whole grad?
if (currentGradIndex == currentInputs.length)
{
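// Newton step for g(x) = f(x) - x = 0: the matrix assembled above is
// J_f - I (the identity was subtracted column by column), so solving
// (J_f - I) * step = x - f(x) yields step = -(J_f - I)^(-1) * g(x)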
// Yes! Solve the linear system for the Newton step
double[] diff = new double[lastImplied.length];
for (int i = 0; i < diff.length; i++)
{
// Input minus implied (rather than the reverse) so the step has the correct sign
diff[i] = lastInputs[i] - lastImplied[i];
}
RealMatrix solved = new LUDecomposition(jacobian).getSolver().solve(MatrixUtils.createColumnRealMatrix(diff));
lastJump = solved.scalarMultiply(_damping).getColumn(0);
LOG.info("Completed Jacobian, adjusting vars");
// Adjust to the new point
adjustAdjustment(currentInputs, lastJump);
for (int i = 0; i < currentInputs.length; i++)
{
currentInputs[i] += lastJump[i];
LOG.fine(String.format("x%s = %s", i, currentInputs[i]));
}
// Reset for the next round
currentGradIndex = -1;
} else if (currentGradIndex == -1)
{
if (distance > lastError)
{
throw new NumericsException(
String.format("The distance has increased: %.3g > %.3g",
distance, lastError));
} else
{
lastError = distance;
// Store this point, which is where the grad is being
// calculated
lastInputs = currentInputs.clone();
lastImplied = implied;
LOG.info("Newton iteration complete");
currentGradIndex++;
}
}
// Now we are handling the next grad
if (currentGradIndex >= 0)
{
// Calculate the delta and adjust the var
lastDelta = Math.abs(lastInputs[currentGradIndex]) < 1e-3 ?
_adjust*adjustFactor :
lastInputs[currentGradIndex] * _adjust*adjustFactor;
currentInputs[currentGradIndex] += lastDelta;
LOG.info(
String.format("Setting x%s=%s for derivative calc ",
currentGradIndex, currentInputs[currentGradIndex]));
}
backTrack = 0;
} catch (Exception e)
{
LOG.log(Level.WARNING,"Caught Exception: " + e.getMessage(),e);
if (lastJump == null)
{
LOG.info("First time Exception");
throw(e);
}
if (backTrack > 5)
{
throw new RuntimeException("Too many failures, giving up", e);
}
// If we are in the middle of calculating grads, we will have to
// start again
if (currentGradIndex >= 0)
{
// Remove the grad delta from the variable again
currentInputs[currentGradIndex] -= lastDelta;
// Set -1 to restart the gradient calculation
currentGradIndex = -1;
}
// For the last shot, try finding a different direction
if(backTrack == 5) {
// Try to find a single variable change that resulted in an improved error
int index = -1;
double currentLowest = Double.NaN;
for(int i = 0; i < lastErrorByAdjustedVar.length; i++)
{
double err = lastErrorByAdjustedVar[i];
if(err < (Double.isNaN(currentLowest) ? lastError : currentLowest))
{
currentLowest = err;
index = i;
}
}
if(index != -1)
{
LOG.info(String.format("Backtracking failed, trying direction %s",index));
backTrack = 0;
// Backtrack the whole way
for (int i = 0; i < lastImplied.length; i++)
{
currentInputs[i] -= lastJump[i];
lastJump[i] = 0d;
}
double delta = (Math.abs(lastInputs[index]) < 1e-3 ?
_adjust*adjustFactor :
lastInputs[index] * _adjust*adjustFactor) * 100;
currentInputs[index] += delta;
lastJump[index] = delta;
}
else
{
LOG.info("Backtracking failed, trying reverse direction");
backTrack++;
// Jump to the mirror image of the failed point on the other
// side of the last good point
for (int i = 0; i < lastImplied.length; i++)
{
lastJump[i] *= -1d;
currentInputs[i] += lastJump[i]*2;
}
}
}
else
{
LOG.info(String.format("Backtracking (%s times so far)", ++backTrack));
// Failed at the last point - backtrack halfway!
for (int i = 0; i < lastImplied.length; i++)
{
double half = lastJump[i] / 2d;
currentInputs[i] -= half;
// Store the delta as halfway, so the next time it
// would only go a quarter back etc.
lastJump[i] = half;
}
}
}
} while (true);
}
double[] calculatePartialDiffs(Double[] outputAtPoint_, Double[] outputWithDelta_, Double delta_)
{
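// Forward difference: column[i] = (f_i(x + delta * e_j) - f_i(x)) / delta,
// where j is the index of the variable that was perturbed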
double[] column = new double[outputWithDelta_.length];
boolean nonZero = false;
// Construct the Jacobian column. Only one input changed, so no
// adjustment is needed for the others here
for (int i = 0; i < outputWithDelta_.length; i++)
{
column[i] = (outputWithDelta_[i] - outputAtPoint_[i]) / delta_;
nonZero = nonZero || !Precision.equals(0, column[i], 1e-10);
}
if(!nonZero)
{
throw new NumericsException("The delta had no effect on output values");
}
return column;
}
double evaluateMetric(Double[] input_, Double[] output_)
{
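// The metric is the Euclidean norm of f(x) - x: the distance between
// the applied inputs and the implied outputs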
DoubleArray<?> outArray = DoubleArrayFactories.createArray(ArrayUtils.toPrimitive(output_));
DoubleArray<?> inArray = DoubleArrayFactories.createArray(ArrayUtils.toPrimitive(input_));
// Take the differences
DoubleArray<?> dist = outArray.subtract(inArray);
// Log them
LOG.fine(String.format("Diffs: %s", dist));
// Calculate the metric
return Math.sqrt(dist.multiply(dist).sum());
}
void validateDistance(double error_, Double[] inputs_, final Double[] outputs_) throws NumericsException
{
// If the error is not NaN, there is nothing to do
if (!Double.isNaN(error_))
{
return;
}
// Otherwise, find the (first) output which is NaN, and throw an appropriate error
IntStream.range(0, outputs_.length).forEach( varIndex -> {
if (Double.isNaN(outputs_[varIndex]))
{
throw new NumericsException("fn returned NaN for var " + varIndex);
}
});
// Should not happen, but covering all the bases
throw new NumericsException("error is NaN");
}
/**
* Subclasses can override this method to manipulate the adjustment vector
* before it is applied to the current point.
*
* The default implementation does nothing.
*
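* A hypothetical override sketch that caps each component of the step (the
* clamp bound of 0.1 is illustrative only, not part of this class):
* <pre>{@code
* protected void adjustAdjustment(Double[] currentInputs_, double[] adjustment_)
* {
*     for (int i = 0; i < adjustment_.length; i++)
*     {
*         // Clamp each component to [-0.1, 0.1] to limit the step size
*         adjustment_[i] = Math.max(-0.1, Math.min(0.1, adjustment_[i]));
*     }
* }
* }</pre>
*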
* @param currentInputs_ The current point at which the adjustment was calculated and will be
* applied
* @param adjustment_ The adjustment determined by this class (Newton's Method with damping).
*/
protected void adjustAdjustment(Double[] currentInputs_, double[] adjustment_)
{
// Default does nothing
}
}