org.deeplearning4j.scalnet.layers.embeddings.EmbeddingLayer.scala

From ScalNet, a Scala wrapper for Deeplearning4j inspired by Keras (Scala + DL + Spark + GPUs).
/*******************************************************************************
  * Copyright (c) 2015-2018 Skymind, Inc.
  *
  * This program and the accompanying materials are made available under the
  * terms of the Apache License, Version 2.0 which is available at
  * https://www.apache.org/licenses/LICENSE-2.0.
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  * License for the specific language governing permissions and limitations
  * under the License.
  *
  * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package org.deeplearning4j.scalnet.layers.embeddings

import org.deeplearning4j.nn.conf.layers
import org.deeplearning4j.nn.weights.WeightInit
import org.deeplearning4j.scalnet.layers.core.Layer
import org.deeplearning4j.scalnet.regularizers.{ NoRegularizer, WeightRegularizer }
import org.nd4j.linalg.activations.Activation

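/**
  * ScalNet wrapper around DL4J's EmbeddingLayer: maps integer indices in [0, nIn)
  * to dense vectors of size nOut, analogous to an embedding layer in Keras.
  *
  * @param nIn         number of input classes (e.g. vocabulary size)
  * @param nOut        dimensionality of the learned embedding vectors
  * @param activation  activation applied to the layer output
  * @param weightInit  weight initialization scheme
  * @param regularizer L1/L2 regularization applied to the embedding weights
  * @param dropOut     dropout setting passed to DL4J (the default 0.0 leaves dropout disabled)
  * @param name        optional layer name
  */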
class EmbeddingLayer(nIn: Int,
                     nOut: Int,
                     activation: Activation,
                     weightInit: WeightInit,
                     regularizer: WeightRegularizer,
                     dropOut: Double = 0.0,
                     override val name: String = "")
    extends Layer {

  override def compile: org.deeplearning4j.nn.conf.layers.Layer =
    new layers.EmbeddingLayer.Builder()
      .nIn(nIn)
      .nOut(nOut)
      .activation(activation)
      .weightInit(weightInit)
      .l1(regularizer.l1)
      .l2(regularizer.l2)
      .dropOut(dropOut)
      .name(name)
      .build()

  override def inputShape: List[Int] = List(nIn, nOut)

  override def outputShape: List[Int] = List(nOut, nIn)
}

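/** Companion providing Keras-style defaults: identity activation, Xavier weight
  * initialization, no regularization and no dropout. */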
object EmbeddingLayer {
  def apply(nIn: Int,
            nOut: Int,
            activation: Activation = Activation.IDENTITY,
            weightInit: WeightInit = WeightInit.XAVIER,
            regularizer: WeightRegularizer = NoRegularizer(),
            dropOut: Double = 0.0): EmbeddingLayer =
    new EmbeddingLayer(
      nIn,
      nOut,
      activation,
      weightInit,
      regularizer,
      dropOut
    )
}
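A minimal usage sketch, not part of the original file: the sizes below are hypothetical (a 10,000-entry vocabulary embedded into 128-dimensional vectors), and only the API defined above is used.

import org.deeplearning4j.scalnet.layers.embeddings.EmbeddingLayer

// Build the wrapper with the companion's defaults (identity activation, Xavier init).
val embedding = EmbeddingLayer(nIn = 10000, nOut = 128)

// compile produces the underlying DL4J layer configuration.
val dl4jLayer: org.deeplearning4j.nn.conf.layers.Layer = embedding.compile

// The shape helpers simply echo the constructor arguments.
assert(embedding.inputShape == List(10000, 128))
assert(embedding.outputShape == List(128, 10000))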



