All Downloads are FREE. Search and download functionalities are using the official Maven repository.

net.finmath.montecarlo.automaticdifferentiation.RandomVariableDifferentiable Maven / Gradle / Ivy

/*
 * (c) Copyright Christian P. Fries, Germany. Contact: [email protected].
 *
 * Created on 17.06.2017
 */
package net.finmath.montecarlo.automaticdifferentiation;

import java.util.Map;
import java.util.Set;

import net.finmath.stochastic.RandomVariable;

/**
 * Interface providing additional methods for
 * random variable implementing RandomVariable
 * allowing automatic differentiation.
 *
 * The interface will introduce three additional methods:
 * Long getID() and
 * Map<Long, RandomVariable> getGradient()
 * and
 * Map<Long, RandomVariable> getTangents().
 *
 * The method getGradient will return a map providing the first order
 * differentiation of the given random variable (this) with respect to
 * all its input RandomVariableDifferentiables.
 *
 * The method getTangents will return a map providing the first order
 * differentiation of all dependent random variables with respect to the
 * given random variable (this).
 *
 * To get the differentiation dY/dX of Y with respect to a specific object X using backward mode (getGradient) use
 * 
 * 		Map&lt;Long, RandomVariable&gt; gradient = Y.getGradient();
 * 		RandomVariable derivative = gradient.get(X.getID());
 * 
 *
 * To get the differentiation dY/dX of Y with respect to a specific object X using forward mode (getTangents) use
 * 
 * 		Map&lt;Long, RandomVariable&gt; tangent = X.getTangents();
 * 		RandomVariable derivative = tangent.get(Y.getID());
 * 
 *
 * Note: Some implementations may limit the result of the gradient to leaf nodes or the result of the tangent to terminal nodes.
 *
 * @author Christian Fries
 * @version 1.0
 */
public interface RandomVariableDifferentiable extends RandomVariable {

	/**
	 * A unique id for this random variable. Will be used in getGradient.
	 *
	 * @return The id for this random variable.
	 */
	Long getID();

	/**
	 * Returns the gradient of this random variable with respect to all its leaf nodes.
	 * The method calculates the map \( v \mapsto \frac{d u}{d v} \) where \( u \) denotes this.
	 *
	 * @return The gradient map, mapping the id of each independent \( v \) to the derivative \( \frac{d u}{d v} \).
	 */
	default Map<Long, RandomVariable> getGradient() {
		// Passing null requests derivatives w.r.t. all known independents.
		return getGradient(null);
	}

	/**
	 * Returns the gradient of this random variable with respect to the given IDs.
	 * The method calculates the map \( v \mapsto \frac{d u}{d v} \) where \( u \) denotes this.
	 *
	 * @param independentIDs {@link Set} of IDs of random variables \( v \) with respect to which the gradients \( \frac{d u}{d v} \) will be calculated. If null, derivatives w.r.t. all known independents are returned.
	 * @return The gradient map, mapping the id of each requested independent \( v \) to the derivative \( \frac{d u}{d v} \).
	 */
	Map<Long, RandomVariable> getGradient(Set<Long> independentIDs);

	/**
	 * Returns the tangents of this random variable with respect to all its dependent nodes.
	 * The method calculates the map \( u \mapsto \frac{d u}{d v} \) where \( v \) denotes this.
	 *
	 * @return The map of all tangents, mapping the id of each dependent \( u \) to the derivative \( \frac{d u}{d v} \).
	 */
	default Map<Long, RandomVariable> getTangents() {
		// Passing null requests derivatives of all known dependents.
		return getTangents(null);
	}

	/**
	 * Returns the tangents of this random variable with respect to the given dependent node IDs (if dependent).
	 * The method calculates the map \( u \mapsto \frac{d u}{d v} \) where \( v \) denotes this.
	 *
	 * @param dependentIDs {@link Set} of IDs of random variables \( u \) with respect to which the differentials \( \frac{d u}{d v} \) will be calculated.
	 * If null, derivatives w.r.t. all known dependents are returned.
	 * @return The map of differentials, mapping the id of each requested dependent \( u \) to the derivative \( \frac{d u}{d v} \).
	 */
	Map<Long, RandomVariable> getTangents(Set<Long> dependentIDs);

	/**
	 * Returns a clone of this differentiable random variable with a new ID. This implies that the
	 * random variable is a leaf node and independent from all previous calculations.
	 *
	 * @return A clone of this differentiable random variable with a new ID.
	 * @throws UnsupportedOperationException if the implementation does not override this default.
	 */
	default RandomVariableDifferentiable getCloneIndependent() {
		throw new UnsupportedOperationException("Cloning not supported. Please add implementation of getCloneIndependent.");
	}
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy