Change the learning rate decay rate
This commit is contained in:
parent
7814691cbf
commit
8fec6f1b5a
1 changed file with 1 addition and 1 deletion
|
@@ -302,7 +302,7 @@ local function train()
|
||||||
else
|
else
|
||||||
lrd_count = lrd_count + 1
|
lrd_count = lrd_count + 1
|
||||||
if lrd_count > 2 then
|
if lrd_count > 2 then
|
||||||
adam_config.learningRate = adam_config.learningRate * 0.8
|
adam_config.learningRate = adam_config.learningRate * 0.874
|
||||||
print("* learning rate decay: " .. adam_config.learningRate)
|
print("* learning rate decay: " .. adam_config.learningRate)
|
||||||
lrd_count = 0
|
lrd_count = 0
|
||||||
end
|
end
|
||||||
|
|
Loading…
Reference in a new issue