diff --git a/train.lua b/train.lua
index 2029370..ccd3260 100644
--- a/train.lua
+++ b/train.lua
@@ -63,12 +63,12 @@ elseif options.opencl then
 end
 
 -- Run the experiment
+local optimState = {learningRate=options.learningRate,momentum=options.momentum}
 for epoch = 1, options.maxEpoch do
   collectgarbage()
 
   local nextBatch = dataset:batches(options.batchSize)
   local params, gradParams = model:getParameters()
-  local optimState = {learningRate=options.learningRate,momentum=options.momentum}
 
   -- Define optimizer
   local function feval(x)