/*
 *  ******************************************************************************
 *  *
 *  *
 *  * This program and the accompanying materials are made available under the
 *  * terms of the Apache License, Version 2.0 which is available at
 *  * https://www.apache.org/licenses/LICENSE-2.0.
 *  *
 *  *  See the NOTICE file distributed with this work for additional
 *  *  information regarding copyright ownership.
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 *  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 *  * License for the specific language governing permissions and limitations
 *  * under the License.
 *  *
 *  * SPDX-License-Identifier: Apache-2.0
 *  *****************************************************************************
 */

package org.deeplearning4j.nn.params;

import lombok.val;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

import java.util.Map;

/**
 * Pretrain weight initializer.
 * Initializes the visible bias in addition to the weight matrix and hidden bias
 * handled by {@link DefaultParamInitializer}.
 *
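 * <p>A minimal usage sketch, for illustration only (it assumes an {@code AutoEncoder}
 * layer, which extends {@code BasePretrainNetwork}, with example sizes nIn = 4 and nOut = 3;
 * in practice the network builds the flattened parameter view and invokes this
 * initializer internally):</p>
 * <pre>{@code
 * NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
 *         .layer(new org.deeplearning4j.nn.conf.layers.AutoEncoder.Builder().nIn(4).nOut(3).build())
 *         .build();
 *
 * PretrainParamInitializer initializer = PretrainParamInitializer.getInstance();
 * long numParams = initializer.numParams(conf);  // 4*3 weights + 3 hidden biases + 4 visible biases = 19
 * INDArray paramsView = Nd4j.zeros(1, (int) numParams);
 *
 * Map<String, INDArray> params = initializer.init(conf, paramsView, true);
 * INDArray visibleBias = params.get(PretrainParamInitializer.VISIBLE_BIAS_KEY); // length 4, filled with visibleBiasInit
 * }</pre>
 *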
 * @author Adam Gibson
 */
public class PretrainParamInitializer extends DefaultParamInitializer {

    private static final PretrainParamInitializer INSTANCE = new PretrainParamInitializer();

    public static PretrainParamInitializer getInstance() {
        return INSTANCE;
    }

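    /** Parameter key for the visible bias: {@code "vb"} ({@link DefaultParamInitializer#BIAS_KEY} is {@code "b"}). */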
    public static final String VISIBLE_BIAS_KEY = "v" + DefaultParamInitializer.BIAS_KEY;

    @Override
    public long numParams(NeuralNetConfiguration conf) {
        org.deeplearning4j.nn.conf.layers.BasePretrainNetwork layerConf =
                (org.deeplearning4j.nn.conf.layers.BasePretrainNetwork) conf.getLayer();
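        // super.numParams(conf) covers the weight matrix (nIn * nOut) and the hidden bias (nOut);
        // the additional nIn entries are the visible bias.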
        return super.numParams(conf) + layerConf.getNIn();
    }

    @Override
    public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
        Map<String, INDArray> params = super.init(conf, paramsView, initializeParams);

        org.deeplearning4j.nn.conf.layers.BasePretrainNetwork layerConf =
                (org.deeplearning4j.nn.conf.layers.BasePretrainNetwork) conf.getLayer();
        val nIn = layerConf.getNIn();
        val nOut = layerConf.getNOut();
        val nWeightParams = nIn * nOut;

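        // The flattened parameter view is laid out as [weights (nIn*nOut) | hidden bias (nOut) | visible bias (nIn)],
        // so the visible bias occupies the final nIn entries.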
        INDArray paramsViewReshape = paramsView.reshape(paramsView.length());
        INDArray visibleBiasView = paramsViewReshape.get(
                NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nIn));
        params.put(VISIBLE_BIAS_KEY, createVisibleBias(conf, visibleBiasView, initializeParams));
        conf.addVariable(VISIBLE_BIAS_KEY);

        return params;
    }

    protected INDArray createVisibleBias(NeuralNetConfiguration conf, INDArray visibleBiasView,
                                         boolean initializeParameters) {
        org.deeplearning4j.nn.conf.layers.BasePretrainNetwork layerConf =
                (org.deeplearning4j.nn.conf.layers.BasePretrainNetwork) conf.getLayer();
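        // Fill the visible bias with the configured constant (visibleBiasInit); when
        // initializeParameters is false, the values already present in the view are kept.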
        if (initializeParameters) {
            INDArray ret = Nd4j.valueArrayOf(new long[]{1, layerConf.getNIn()}, layerConf.getVisibleBiasInit());
            visibleBiasView.assign(ret);
        }
        return visibleBiasView;
    }


    @Override
    public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
        Map<String, INDArray> out = super.getGradientsFromFlattened(conf, gradientView);
        org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();

        val nIn = layerConf.getNIn();
        val nOut = layerConf.getNOut();
        val nWeightParams = nIn * nOut;
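        // The flattened gradient view uses the same layout as the parameter view:
        // [weight gradients (nIn*nOut) | hidden bias gradients (nOut) | visible bias gradients (nIn)]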
        INDArray gradientViewReshape = gradientView.reshape(gradientView.length());
        INDArray vBiasView = gradientViewReshape.get(
                NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nIn));

        out.put(VISIBLE_BIAS_KEY, vBiasView);

        return out;
    }
}