
Change the learning rate decay rate

nagadomi 2016-05-21 09:56:26 +09:00
parent 7814691cbf
commit 8fec6f1b5a


@@ -302,7 +302,7 @@ local function train()
          else
             lrd_count = lrd_count + 1
             if lrd_count > 2 then
-               adam_config.learningRate = adam_config.learningRate * 0.8
+               adam_config.learningRate = adam_config.learningRate * 0.874
                print("* learning rate decay: " .. adam_config.learningRate)
                lrd_count = 0
             end
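For context, the change makes the decay gentler: each time the condition fires, the learning rate is multiplied by 0.874 instead of 0.8, so after ten decay steps roughly 26% of the original rate remains rather than about 11%. The following is only an illustrative Lua sketch of that compounding effect; the initial rate used here is a placeholder, not a value taken from this commit.

-- Illustrative only: shows how the decay factor compounds over repeated
-- decay steps. The starting rate below is an assumed placeholder.
local lr = 0.00025
for step = 1, 10 do
   lr = lr * 0.874   -- this commit's factor (previously 0.8)
   print(string.format("decay step %2d: learning rate = %.8f", step, lr))
end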