org.deeplearning4j.nn.graph.vertex.impl.ElementWiseVertex

/*
 *  ******************************************************************************
 *  *
 *  *
 *  * This program and the accompanying materials are made available under the
 *  * terms of the Apache License, Version 2.0 which is available at
 *  * https://www.apache.org/licenses/LICENSE-2.0.
 *  *
 *  *  See the NOTICE file distributed with this work for additional
 *  *  information regarding copyright ownership.
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 *  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 *  * License for the specific language governing permissions and limitations
 *  * under the License.
 *  *
 *  * SPDX-License-Identifier: Apache-2.0
 *  *****************************************************************************
 */

package org.deeplearning4j.nn.graph.vertex.impl;

import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.MaskState;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.graph.vertex.BaseGraphVertex;
import org.deeplearning4j.nn.graph.vertex.VertexIndices;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.CustomOp;
import org.nd4j.linalg.api.ops.DynamicCustomOp;
import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastTo;
import org.nd4j.linalg.api.ops.impl.transforms.bool.MatchConditionTransform;
import org.nd4j.linalg.api.ops.impl.transforms.pairwise.arithmetic.SubOp;
import org.nd4j.linalg.api.ops.impl.transforms.pairwise.bool.Or;
import org.nd4j.linalg.api.shape.Shape;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.conditions.Conditions;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.nd4j.common.primitives.Pair;
import org.deeplearning4j.nn.workspace.ArrayType;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;

import java.util.Arrays;
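
/**
 * An ElementWiseVertex combines the activations of two or more inputs element-wise, using one of the
 * operations Add, Subtract, Product, Average or Max. Subtract supports exactly 2 inputs; the other
 * operations support 2 or more. Input shapes must either match exactly or be broadcastable to a common
 * output shape (for example, [minibatch, n] combined with [minibatch, 1]).
 */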

public class ElementWiseVertex extends BaseGraphVertex {

    public enum Op {
        Add, Subtract, Product, Average, Max
    }

    private Op op;                  //The element-wise operation applied by this vertex
    private int nInForwardPass;     //Number of inputs in the most recent forward pass; determines the number of epsilons returned by doBackward

    public ElementWiseVertex(ComputationGraph graph, String name, int vertexIndex, Op op, DataType dataType) {
        this(graph, name, vertexIndex, null, null, op, dataType);
    }

    public ElementWiseVertex(ComputationGraph graph, String name, int vertexIndex, VertexIndices[] inputVertices,
                    VertexIndices[] outputVertices, Op op, DataType dataType) {
        super(graph, name, vertexIndex, inputVertices, outputVertices, dataType);
        this.op = op;
    }

    @Override
    public boolean hasLayer() {
        return false;
    }

    @Override
    public Layer getLayer() {
        return null;
    }

    @Override
    public INDArray doForward(boolean training, LayerWorkspaceMgr workspaceMgr) {
        if (!canDoForward())
            throw new IllegalStateException("Cannot do forward pass: inputs not set");

        nInForwardPass = inputs.length;
        if (inputs.length == 1)
            return workspaceMgr.dup(ArrayType.ACTIVATIONS, inputs[0]);

        //Determine whether broadcasting is required: true if any input shape differs from the first input's shape
        boolean isBc = false;
        for(int i = 1; i < inputs.length; i++) {
            if(!inputs[0].equalShapes(inputs[i])) {
                isBc = true;
                break;
            }
        }

        long[] outShape;
        if(!isBc) {
            outShape = inputs[0].shape();
        } else {
            outShape = Shape.broadcastOutputShape(inputs[0].shape(), inputs[1].shape());
            for( int i = 2; i < inputs.length; i++) {
                outShape = Shape.broadcastOutputShape(outShape, inputs[i].shape());
            }
        }

        switch (op) {
            case Add:
                INDArray sum =  workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape);
                if(isBc && !Arrays.equals(outShape, inputs[0].shape())) {
                    Nd4j.exec(new BroadcastTo(inputs[0], outShape, sum));
                } else {
                    sum.assign(inputs[0]);
                }

                for (int i = 1; i < inputs.length; i++) {
                    sum.addi(inputs[i].castTo(dataType));
                }
                return sum;
            case Average:
                INDArray average =  workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape);
                if(isBc && !Arrays.equals(outShape, inputs[0].shape())){
                    Nd4j.exec(new BroadcastTo(inputs[0], outShape, average));
                } else {
                    average.assign(inputs[0]);
                }
                for (int i = 1; i < inputs.length; i++) {
                    average.addi(inputs[i].castTo(dataType));
                }
                return average.divi(inputs.length);
            case Subtract:
                if (inputs.length != 2)
                    throw new IllegalArgumentException("ElementWise subtraction only supports 2 inputs");
                return Nd4j.exec(new SubOp(inputs, new INDArray[]{workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, inputs[0].dataType(), outShape)}))[0];
            case Product:
                INDArray product =  workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape);

                if(isBc && !Arrays.equals(outShape, inputs[0].shape())) {
                    Nd4j.exec(new BroadcastTo(inputs[0], outShape, product));
                } else {
                    product.assign(inputs[0]);
                }

                for (int i = 1; i < inputs.length; i++) {
                    product.muli(inputs[i].castTo(dataType));
                }
                return product;
            case Max:
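                //Element-wise maximum across all inputs; check first whether broadcasting is required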
                boolean isBroadcast = false;
                for(int i = 1; i < inputs.length; i++) {
                    isBroadcast |= !inputs[0].equalShapes(inputs[i]);
                    if(isBroadcast)
                        break;
                }
                if(!isBroadcast) {
                    //All inputs have the same shape: apply the mergemax custom op directly
                    INDArray max = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape, inputs[0].ordering());
                    CustomOp op = DynamicCustomOp.builder("mergemax")
                                    .addInputs(inputs)
                                    .addOutputs(max)
                                    .callInplace(false)
                                    .build();
                    Nd4j.getExecutioner().exec(op);
                    return max;
                } else {
                    //Broadcast case: mergemax does not support broadcasting, so broadcast each input
                    //to the output shape and accumulate the element-wise maximum pairwise
                    INDArray max = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape);
                    if(Arrays.equals(outShape, inputs[0].shape())) {
                        max.assign(inputs[0]);
                    } else {
                        Nd4j.exec(new BroadcastTo(inputs[0], outShape, max));
                    }
                    INDArray temp = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, dataType, outShape);
                    for(int i = 1; i < inputs.length; i++) {
                        if(Arrays.equals(outShape, inputs[i].shape())) {
                            temp.assign(inputs[i]);
                        } else {
                            Nd4j.exec(new BroadcastTo(inputs[i], outShape, temp));
                        }
                        //In-place pairwise maximum: max = max(max, temp)
                        Transforms.max(max, temp, false);
                    }
                    return max;
                }
            default:
                throw new UnsupportedOperationException("Unknown op: " + this.op);
        }
    }

    @Override
    public Pair<Gradient, INDArray[]> doBackward(boolean tbptt, LayerWorkspaceMgr workspaceMgr) {
        if (!canDoBackward())
            throw new IllegalStateException("Cannot do backward pass: errors not set");

        if (nInForwardPass == 1)
            return new Pair<>(null, new INDArray[] {workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon)});

        boolean broadcastCase = false;
        for( int i = 1; i < nInForwardPass; i++) {
            broadcastCase |= !inputs[0].equalShapes(inputs[i]);
        }

        switch (op) {
            case Add:
                //If x = a_1 + ... + a_n then dL/da_i = dL/dx * dx/da_i = dL/dx: epsilon passes through unchanged
                INDArray[] out = new INDArray[nInForwardPass];
                for (int i = 0; i < nInForwardPass; i++) {
                    if(!broadcastCase) {
                        //Standard case: all inputs have the same shape as epsilon
                        out[i] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon);
                    } else {
                        //Broadcast case: sum epsilon along the broadcast dimension(s) for the smaller input(s)
                        //Example: [mb,3]+[mb,1] -> input 0 backprops epsilon, input 1 backprops epsilon.sum(1,keepDim=true)
                        if(inputs[i].equalShapes(epsilon)){
                            out[i] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon);
                        } else {
                            int[] bcDim = Shape.getBroadcastDimensions(inputs[i].shape(), epsilon.shape());
                            try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)){
                                out[i] = epsilon.sum(true, bcDim);
                            }
                        }
                    }
                }
                return new Pair<>(null, out);
            case Average:
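                //If x = (a_1 + ... + a_n) / n then dL/da_i = dL/dx / n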
                INDArray[] outAverage = new INDArray[nInForwardPass];
                try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)){
                    for (int i = 0; i < nInForwardPass; i++) {
                        if(inputs[i].equalShapes(epsilon)){
                            outAverage[i] = epsilon.div(nInForwardPass);
                        } else {
                            int[] bcDim = Shape.getBroadcastDimensions(inputs[i].shape(), epsilon.shape());
                            outAverage[i] = epsilon.div(nInForwardPass).sum(true, bcDim);
                        }
                    }
                }
                return new Pair<>(null, outAverage);
            case Subtract:
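                //If x = a - b then dL/da = dL/dx and dL/db = -dL/dx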
                INDArray[] out2 = new INDArray[2];
                if(!broadcastCase){
                    out2[0] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon);
                    out2[1] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon).negi();
                } else {
                    if(inputs[0].equalShapes(epsilon)){
                        //Second input is smaller/broadcast
                        out2[0] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon);
                        int[] bcDim = Shape.getBroadcastDimensions(inputs[1].shape(), epsilon.shape());
                        try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
                            out2[1] = epsilon.sum(true, bcDim).negi();
                        }
                    } else {
                        //First input is smaller/broadcast
                        int[] bcDim = Shape.getBroadcastDimensions(inputs[0].shape(), epsilon.shape());
                        try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
                            out2[0] = epsilon.sum(true, bcDim);
                        }
                        out2[1] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon).negi();
                    }
                }
                return new Pair<>(null, out2);
            case Product:
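                //If x = a_1 * ... * a_n then dL/da_i = dL/dx * product of all a_j, j != i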
                INDArray[] out_product = new INDArray[nInForwardPass];
                INDArray[] inBc = inputs;
                if(broadcastCase){
                    inBc = new INDArray[inputs.length];
                    for( int i = 0; i < inputs.length; i++) {
                        if(inputs[i].equalShapes(epsilon)) {
                            inBc[i] = inputs[i];
                        } else {
                            inBc[i] = workspaceMgr.createUninitialized(ArrayType.BP_WORKING_MEM, inputs[i].dataType(), epsilon.shape());
                            Nd4j.exec(new BroadcastTo(inputs[i], epsilon.shape(), inBc[i]));
                        }
                    }
                }

                for (int i = 0; i < nInForwardPass; i++) {
                    //Gradient for input i: epsilon times the product of all other (broadcast) inputs
                    out_product[i] = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilon);
                    for (int j = 0; j < nInForwardPass; ++j) {
                        if (i != j)
                            out_product[i].muli(inBc[j]);
                    }

                    if(broadcastCase && !inputs[i].equalShapes(epsilon)) {
                        //Broadcast case: sum the gradient along the broadcast dimensions
                        int[] bcDim = Shape.getBroadcastDimensions(inputs[i].shape(), epsilon.shape());
                        try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
                            out_product[i] = out_product[i].sum(true, bcDim);
                        }
                    }
                }
                return new Pair<>(null, out_product);
            case Max:
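                //Gradient flows only to the input that holds the maximum at each position; other inputs get 0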
                INDArray[] outMax = new INDArray[nInForwardPass];
                INDArray maxIndices = workspaceMgr.createUninitialized(ArrayType.BP_WORKING_MEM, DataType.INT, epsilon.shape(), epsilon.ordering());

                INDArray[] bcIn = inputs;
                if(broadcastCase){
                    //Broadcast to right shape...
                    bcIn = new INDArray[inputs.length];
                    for( int i = 0; i < inputs.length; i++) {
                        if(inputs[i].equalShapes(epsilon)) {
                            bcIn[i] = inputs[i];
                        } else {
                            bcIn[i] = workspaceMgr.createUninitialized(ArrayType.BP_WORKING_MEM, inputs[i].dataType(), epsilon.shape());
                            Nd4j.exec(new BroadcastTo(inputs[i], epsilon.shape(), bcIn[i]));
                        }
                    }
                }

                //mergemaxindex records, for every position, the index of the input with the maximum value
                CustomOp op = DynamicCustomOp.builder("mergemaxindex")
                                .addInputs(bcIn)
                                .addOutputs(maxIndices)
                                .callInplace(false)
                                .build();
                Nd4j.getExecutioner().exec(op);

                for (int i = 0; i < nInForwardPass; i++) {
                    //Generate a 1-where-max / 0-elsewhere mask for input i, then multiply by epsilon
                    INDArray mask = workspaceMgr.createUninitialized(ArrayType.BP_WORKING_MEM, DataType.BOOL, epsilon.shape(), epsilon.ordering());
                    Nd4j.getExecutioner().exec(new MatchConditionTransform(maxIndices, mask, Conditions.equals(i)));
                    try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
                        if(broadcastCase && !inputs[i].equalShapes(epsilon)) {
                            //Broadcast case: sum the masked gradient along the broadcast dimensions
                            int[] bcDim = Shape.getBroadcastDimensions(inputs[i].shape(), epsilon.shape());
                            outMax[i] = mask.castTo(epsilon.dataType()).muli(epsilon).sum(true, bcDim);
                        } else {
                            outMax[i] = mask.castTo(epsilon.dataType()).muli(epsilon);
                        }
                    }
                }
                return new Pair<>(null, outMax);
            default:
                throw new UnsupportedOperationException("Unknown op: " + this.op);
        }
    }

    @Override
    public void setBackpropGradientsViewArray(INDArray backpropGradientsViewArray) {
        if (backpropGradientsViewArray != null)
            throw new RuntimeException("Vertex does not have gradients; gradients view array cannot be set here");
    }

    @Override
    public Pair<INDArray, MaskState> feedForwardMaskArrays(INDArray[] maskArrays, MaskState currentMaskState,
                    int minibatchSize) {
        if (maskArrays == null) {
            return new Pair<>(null, currentMaskState);
        }

        //Most common case: all or none.
        //If there's only *some* mask arrays: assume the others (missing) are equivalent to all 1s
        //And for handling multiple masks: best strategy seems to be an OR operation
        //i.e., output is 1 if any of the input are 1s
        //Which means: if any masks are missing, output null (equivalent to no mask, or all steps present)
        //Otherwise do an element-wise OR operation
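        //For example, per-timestep masks [1,1,0,0] and [1,1,1,0] OR together to give [1,1,1,0]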

        for (INDArray arr : maskArrays) {
            if (arr == null) {
                return new Pair<>(null, currentMaskState);
            }
        }

        //At this point: all present. Do OR operation
        if (maskArrays.length == 1) {
            return new Pair<>(maskArrays[0], currentMaskState);
        } else {
            INDArray ret = Nd4j.createUninitialized(DataType.BOOL, maskArrays[0].shape());  //maskArrays[0].dup(maskArrays[0].ordering());
            Nd4j.getExecutioner().exec(new Or(maskArrays[0].castTo(DataType.BOOL), maskArrays[1].castTo(DataType.BOOL), ret));
            for (int i = 2; i < maskArrays.length; i++) {
                Nd4j.getExecutioner().exec(new Or(maskArrays[i].castTo(DataType.BOOL), ret, ret));
            }
            return new Pair<>(ret.castTo(Nd4j.defaultFloatingPointType()), currentMaskState);
        }
    }

    @Override
    public String toString() {
        return "ElementWiseVertex(id=" + this.getVertexIndex() + ",name=\"" + this.getVertexName() + "\",op=" + op
                        + ")";
    }
}
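
Usage sketch (not part of the source file above): this runtime vertex is normally created from the
configuration class org.deeplearning4j.nn.conf.graph.ElementWiseVertex rather than constructed
directly. Below is a minimal, illustrative example of wiring an element-wise Add vertex into a
ComputationGraph; the layer names and sizes are assumptions for the example, not taken from this file:

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ElementWiseVertexExample {
    public static void main(String[] args) {
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .graphBuilder()
                .addInputs("in1", "in2")
                //Two branches with the same output size, so the Add vertex sees equal shapes
                .addLayer("d1", new DenseLayer.Builder().nIn(5).nOut(10).build(), "in1")
                .addLayer("d2", new DenseLayer.Builder().nIn(5).nOut(10).build(), "in2")
                //Element-wise sum of the two branch activations; Subtract, Product, Average and Max are also available
                .addVertex("sum", new org.deeplearning4j.nn.conf.graph.ElementWiseVertex(
                                org.deeplearning4j.nn.conf.graph.ElementWiseVertex.Op.Add), "d1", "d2")
                .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                .activation(Activation.IDENTITY).nIn(10).nOut(3).build(), "sum")
                .setOutputs("out")
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();
    }
}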



