com.kotlinnlp.simplednn.deeplearning.mergelayers.affine.AffineLayerStructure.kt
SimpleDNN is a lightweight open-source machine learning library written in Kotlin whose purpose is to support the development of feed-forward and recurrent Artificial Neural Networks.
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/
package com.kotlinnlp.simplednn.deeplearning.mergelayers.affine

import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction
import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.deeplearning.mergelayers.MergeLayer
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The Affine Layer Structure.
 *
 * @property inputArray the first input array of the layer
 * @property inputArray2 the second input array of the layer
 * @property outputArray the output array of the layer
 * @property params the parameters which connect the input to the output
 * @property activationFunction the activation function of the layer
 * @property dropout the probability of dropout (default 0.0).
 *                   If applying it, the usual value is 0.5 (better 0.25 if it's the first layer).
 */
class AffineLayerStructure<InputNDArrayType : NDArray<InputNDArrayType>>(
  inputArray1: AugmentedArray<InputNDArrayType>,
  inputArray2: AugmentedArray<InputNDArrayType>,
  outputArray: AugmentedArray<DenseNDArray>,
  override val params: AffineLayerParameters,
  activationFunction: ActivationFunction? = null,
  dropout: Double = 0.0
) : MergeLayer<InputNDArrayType>(
  inputArray1 = inputArray1,
  inputArray2 = inputArray2,
  outputArray = outputArray,
  params = params,
  activationFunction = activationFunction,
  dropout = dropout) {

  /**
   * The helper which executes the forward.
   */
  override val forwardHelper = AffineForwardHelper(layer = this)

  /**
   * The helper which executes the backward.
   */
  override val backwardHelper = AffineBackwardHelper(layer = this)

  /**
   * The helper which calculates the relevance.
   */
  override val relevanceHelper = AffineRelevanceHelper(layer = this)

  /**
   * Initialization: set the activation function of the outputArray.
   */
  init {
    if (activationFunction != null) {
      outputArray.setActivation(activationFunction)
    }
  }

  /**
   * @return the [AffineLayerParameters] used to store errors
   */
  override fun parametersErrorsFactory() = AffineLayerParameters(
    inputSize1 = this.params.inputSize1,
    inputSize2 = this.params.inputSize2,
    outputSize = this.params.outputSize,
    sparseInput = this.params.sparseInput,
    weightsInitializer = null,
    biasesInitializer = null
  )
}
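
For reference, a minimal usage sketch follows. It is not part of the original file: the input values, sizes, and the Tanh activation are illustrative, the AffineLayerParameters constructor is assumed to accept only the three sizes with default weight and bias initializers, and AugmentedArray(values) / DenseNDArrayFactory.arrayOf(...) stand for the array-wrapping factories exposed by the installed SimpleDNN version. The layer merges its two inputs as activation(w1 · x1 + w2 · x2 + b), with the forward pass delegated to AffineForwardHelper.

// Minimal usage sketch (assumptions noted above): build an affine merge layer over
// two dense inputs of sizes 4 and 2, producing a 3-dimensional Tanh-activated output.
import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.functionalities.activations.Tanh
import com.kotlinnlp.simplednn.deeplearning.mergelayers.affine.AffineLayerParameters
import com.kotlinnlp.simplednn.deeplearning.mergelayers.affine.AffineLayerStructure
import com.kotlinnlp.simplednn.simplemath.ndarray.Shape
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

fun main() {

  // Parameters connecting the two inputs to the output (default initializers assumed).
  val params = AffineLayerParameters(inputSize1 = 4, inputSize2 = 2, outputSize = 3)

  val layer = AffineLayerStructure<DenseNDArray>(
    inputArray1 = AugmentedArray(DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.8, 0.1, 0.4, 0.6))),
    inputArray2 = AugmentedArray(DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.2))),
    outputArray = AugmentedArray(DenseNDArrayFactory.zeros(Shape(3))),
    params = params,
    activationFunction = Tanh())

  layer.forward() // output = Tanh(w1 · x1 + w2 · x2 + b)

  println(layer.outputArray.values)
}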