/*
* (c) Copyright Christian P. Fries, Germany. Contact: [email protected].
*
* Created on 17.06.2017
*/
package net.finmath.montecarlo.automaticdifferentiation;
import java.util.Map;
import java.util.Set;
import net.finmath.stochastic.RandomVariable;
/**
 * Interface providing additional methods for
 * random variables implementing {@link RandomVariable},
 * allowing automatic differentiation.
*
 * The interface will introduce three additional methods:
 * {@code Long getID()},
 * {@code Map<Long, RandomVariable> getGradient()} and
 * {@code Map<Long, RandomVariable> getTangents()}.
*
 * The method {@code getGradient} will return a map providing the first order
 * differentiation of the given random variable (this) with respect to
 * all its input {@code RandomVariableDifferentiable}s.
*
 * The method {@code getTangents} will return a map providing the first order
 * differentiation of all dependent random variables with respect to the
 * given random variable (this).
*
 * To get the differentiation dY/dX of Y with respect to a specific object X using backward mode ({@code getGradient}) use
 * <pre>{@code
 * 	Map<Long, RandomVariable> gradient = Y.getGradient();
 * 	RandomVariable derivative = gradient.get(X.getID());
 * }</pre>
 *
 * To get the differentiation dY/dX of Y with respect to a specific object X using forward mode ({@code getTangents}) use
 * <pre>{@code
 * 	Map<Long, RandomVariable> tangents = X.getTangents();
 * 	RandomVariable derivative = tangents.get(Y.getID());
 * }</pre>
 *
 * Note: Some implementations may limit the result of the gradient to leaf nodes or the result of the tangents to terminal nodes.
*
* @author Christian Fries
* @version 1.0
*/
public interface RandomVariableDifferentiable extends RandomVariable {
/**
 * A unique id for this random variable. Will be used in {@code getGradient}.
*
* @return The id for this random variable.
*/
Long getID();
/**
 * Returns the gradient of this random variable with respect to all its leaf nodes.
 * The method calculates the map \( v \mapsto \frac{d u}{d v} \) where \( u \) denotes this.
 *
 * @return The gradient map.
 */
default Map<Long, RandomVariable> getGradient() {
	return getGradient(null);
}
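// A minimal usage sketch for backward mode (not part of the original file; it assumes an
// AAD-capable factory such as RandomVariableDifferentiableAADFactory from this package's
// "backward" subpackage):
//
//   RandomVariableDifferentiable x = new RandomVariableDifferentiableAADFactory().createRandomVariable(2.0);
//   RandomVariableDifferentiable y = (RandomVariableDifferentiable) x.mult(x);   // y = x * x
//   RandomVariable dYdX = y.getGradient().get(x.getID());                        // expected value 4.0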
/**
 * Returns the gradient of this random variable with respect to the given IDs.
 * The method calculates the map \( v \mapsto \frac{d u}{d v} \) where \( u \) denotes this.
 *
 * @param independentIDs {@link Set} of IDs of random variables \( v \) with respect to which the gradients \( \frac{d u}{d v} \) will be calculated. If null, derivatives w.r.t. all known independents are returned.
 * @return The gradient map.
 */
Map<Long, RandomVariable> getGradient(Set<Long> independentIDs);
/**
 * Returns the tangents of this random variable with respect to all its dependent nodes.
 * The method calculates the map \( u \mapsto \frac{d u}{d v} \) where \( v \) denotes this.
 *
 * @return The map of all tangents.
 */
default Map<Long, RandomVariable> getTangents() {
	return getTangents(null);
}
/**
 * Returns the tangents of this random variable with respect to the given dependent node IDs (if dependent).
 * The method calculates the map \( u \mapsto \frac{d u}{d v} \) where \( v \) denotes this.
 *
 * @param dependentIDs {@link Set} of IDs of random variables \( u \) with respect to which the differentials \( \frac{d u}{d v} \) will be calculated.
 * If null, derivatives w.r.t. all known dependents are returned.
 * @return The map of differentials.
 */
Map<Long, RandomVariable> getTangents(Set<Long> dependentIDs);
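// A usage sketch for forward mode (hypothetical; assumes a forward-mode capable implementation,
// e.g. from this package's "forward" subpackage, and a hypothetical factory object): the tangent
// map is queried on the independent variable x for the ID of the dependent variable y:
//
//   RandomVariableDifferentiable x = factory.createRandomVariable(2.0);
//   RandomVariableDifferentiable y = (RandomVariableDifferentiable) x.exp();
//   RandomVariable dYdX = x.getTangents().get(y.getID());   // d exp(x)/dx = exp(x)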
/**
* Returns a clone of this differentiable random variable with a new ID. This implies that the
* random variable is a leaf node and independent from all previous calculations.
*
* @return A clone of this differentiable random variable with a new ID.
*/
default RandomVariableDifferentiable getCloneIndependent() {
throw new UnsupportedOperationException("Cloning not supported. Please add implementation of getCloneIndependent.");
}
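// A sketch of the intended use of getCloneIndependent (hypothetical variables x, z, w):
// re-rooting an intermediate result z as a fresh leaf, so that later gradients stop at z
// instead of propagating through to z's original inputs:
//
//   RandomVariableDifferentiable z = ((RandomVariableDifferentiable) x.exp()).getCloneIndependent();
//   RandomVariableDifferentiable w = (RandomVariableDifferentiable) z.mult(z);
//   RandomVariable dWdZ = w.getGradient().get(z.getID());   // equals 2 z; no entry for x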
}