/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.keras

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToBatch}
import com.intel.analytics.bigdl.nn.ClassNLLCriterion
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.utils.Engine
import com.intel.analytics.bigdl.models.lenet.LeNet5
import org.apache.spark.SparkContext
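
// Trains the Keras-style LeNet-5 model on MNIST with BigDL's Keras-like API.
// Options such as the data folder, batch size, learning rate and epoch count are
// parsed by the LeNet example's trainParser; see that Utils object for the flags.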
object Train {
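
  // trainParser, TrainParams, load and the MNIST mean/std constants
  // (trainMean, trainStd, testMean, testStd) come from models.lenet.Utils.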
  import models.lenet.Utils._

  def main(args: Array[String]): Unit = {
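    // Only proceed when the command-line arguments parse successfully.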
    trainParser.parse(args, new TrainParams()).map(param => {
      val conf = Engine.createSparkConf()
        .setAppName("Train Lenet on MNIST")
        .set("spark.task.maxFailures", "1")
      val sc = new SparkContext(conf)
      Engine.init
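
      // Raw MNIST image/label files (IDX format) under the configured data folder.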
      val trainData = param.folder + "/train-images-idx3-ubyte"
      val trainLabel = param.folder + "/train-labels-idx1-ubyte"
      val validationData = param.folder + "/t10k-images-idx3-ubyte"
      val validationLabel = param.folder + "/t10k-labels-idx1-ubyte"
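
      // Keras-style LeNet-5 with 10 output classes; the graphModel option selects
      // the graph (functional) definition instead of the sequential one.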
      val model = if (param.graphModel) LeNet5.kerasGraph(classNum = 10)
      else LeNet5.keras(classNum = 10)
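
      // Resume optimizer state from a snapshot when one is given; otherwise start
      // plain SGD with the configured learning rate and decay.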
      val optimMethod = if (param.stateSnapshot.isDefined) {
        OptimMethod.load[Float](param.stateSnapshot.get)
      } else {
        new SGD[Float](learningRate = param.learningRate,
          learningRateDecay = param.learningRateDecay)
      }
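
      // Distributed data pipelines: decode the raw bytes into 28x28 grey images,
      // normalize with the dataset mean/std, then group into mini-batches.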
      val trainSet = DataSet.array(load(trainData, trainLabel), sc) ->
        BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) ->
        GreyImgToBatch(param.batchSize)

      val validationSet = DataSet.array(load(validationData, validationLabel), sc) ->
        BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) ->
        GreyImgToBatch(param.batchSize)
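
      // Negative log-likelihood loss on probabilities (logProbAsInput = false),
      // with top-1/top-5 accuracy and loss reported as validation metrics.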
      model.compile(optimizer = optimMethod,
        loss = ClassNLLCriterion[Float](logProbAsInput = false),
        metrics = Array(new Top1Accuracy[Float](), new Top5Accuracy[Float](), new Loss[Float]))
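
      // Train for maxEpoch epochs, evaluating on the validation set during training.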
      model.fit(trainSet, nbEpoch = param.maxEpoch, validationData = validationSet)

      sc.stop()
    })
  }
}