1
0
Fork 0
mirror of synced 2024-05-18 11:52:17 +12:00
waifu2x/train.lua

291 lines
9.6 KiB
Lua
Raw Normal View History

2015-11-08 22:31:46 +13:00
require 'pl'
2015-10-28 19:30:47 +13:00
local __FILE__ = (function() return string.gsub(debug.getinfo(2, 'S').source, "^@", "") end)()
package.path = path.join(path.dirname(__FILE__), "lib", "?.lua;") .. package.path
2015-05-16 17:48:05 +12:00
require 'optim'
require 'xlua'
2015-10-28 19:30:47 +13:00
require 'w2nn'
local settings = require 'settings'
local srcnn = require 'srcnn'
local minibatch_adam = require 'minibatch_adam'
local iproc = require 'iproc'
local reconstruct = require 'reconstruct'
local compression = require 'compression'
local pairwise_transform = require 'pairwise_transform'
local image_loader = require 'image_loader'
-- Upscale the benchmark image with the current model and write the result
-- to `file` (PNG). Used to snapshot visual quality when a new best model
-- is found during training.
local function save_test_scale(model, rgb, file)
   local scaled = reconstruct.scale(model, settings.scale, rgb, 128,
                                    settings.upsampling_filter)
   image.save(file, scaled)
end
-- Run the denoising model over the benchmark image and write the result
-- to `file` (PNG). Counterpart of save_test_scale for the "noise" method.
local function save_test_jpeg(model, rgb, file)
   -- reconstruct.image returns (image, count); only the image is needed here.
   local restored = reconstruct.image(model, rgb)
   image.save(file, restored)
end
-- Randomly partition the sample list `x` into a training set and a
-- validation set of `test_size` entries, using a fresh random permutation.
-- Returns (train_x, valid_x) as plain Lua arrays.
local function split_data(x, test_size)
   local perm = torch.randperm(#x)
   local train_size = #x - test_size
   local train_x, valid_x = {}, {}
   for i = 1, #x do
      if i <= train_size then
         train_x[i] = x[perm[i]]
      else
         valid_x[i - train_size] = x[perm[i]]
      end
   end
   return train_x, valid_x
end
-- Pre-generate a fixed validation set: for each source sample, run the
-- patch `transformer` (in validation mode) enough times to yield roughly
-- `n` crops per sample, `patches` crops per call. Returns a flat array of
-- {x = input_patch, y = target_patch} records.
local function make_validation_set(x, transformer, n, patches)
   n = n or 4
   local data = {}
   for i = 1, #x do
      -- at least one transformer call per sample, even when patches >= n
      local reps = math.max(n / patches, 1)
      for k = 1, reps do
         local xy = transformer(x[i], true, patches)
         for j = 1, #xy do
            data[#data + 1] = {x = xy[j][1], y = xy[j][2]}
         end
      end
      xlua.progress(i, #x)
      collectgarbage()
   end
   return data
end
-- Compute the mean criterion value of `model` over the precomputed
-- validation pairs in `data`, processed in fixed-size CUDA minibatches.
-- A trailing partial batch (fewer than `batch_size` pairs) is skipped.
local function validate(model, criterion, data, batch_size)
   local loss_sum = 0
   local batch_count = 0
   local first = data[1]
   -- CPU staging buffers sized from the first sample, plus pinned GPU copies.
   local input_cpu = torch.Tensor(batch_size,
                                  first.x:size(1),
                                  first.x:size(2),
                                  first.x:size(3)):zero()
   local target_cpu = torch.Tensor(batch_size,
                                   first.y:size(1),
                                   first.y:size(2),
                                   first.y:size(3)):zero()
   local input_gpu = input_cpu:clone():cuda()
   local target_gpu = target_cpu:clone():cuda()
   for t = 1, #data, batch_size do
      if t + batch_size - 1 > #data then
         break -- drop the final incomplete batch
      end
      for i = 1, batch_size do
         input_cpu[i]:copy(data[t + i - 1].x)
         target_cpu[i]:copy(data[t + i - 1].y)
      end
      input_gpu:copy(input_cpu)
      target_gpu:copy(target_cpu)
      local z = model:forward(input_gpu)
      loss_sum = loss_sum + criterion:forward(z, target_gpu)
      batch_count = batch_count + 1
      if t % 10 == 0 then
         xlua.progress(t, #data)
         collectgarbage()
      end
   end
   xlua.progress(#data, #data)
   return loss_sum / batch_count
end
-- Build the training criterion for `model`: a clipped weighted Huber loss
-- over the flattened output patch, weighted per color channel for RGB
-- models and uniformly for single-channel (Y) models.
-- Fix: `offset`/`output_w` were computed identically in both branches;
-- hoisted above the branch (behavior unchanged).
local function create_criterion(model)
   local offset = reconstruct.offset_size(model)
   local output_w = settings.crop_size - offset * 2
   if reconstruct.is_rgb(model) then
      local weight = torch.Tensor(3, output_w * output_w)
      -- Luma coefficients scaled by 3 so the mean channel weight stays 1.0.
      weight[1]:fill(0.29891 * 3) -- R
      weight[2]:fill(0.58661 * 3) -- G
      weight[3]:fill(0.11448 * 3) -- B
      return w2nn.ClippedWeightedHuberCriterion(weight, 0.1, {0.0, 1.0}):cuda()
   else
      local weight = torch.Tensor(1, output_w * output_w)
      weight[1]:fill(1.0)
      return w2nn.ClippedWeightedHuberCriterion(weight, 0.1, {0.0, 1.0}):cuda()
   end
end
-- Decompress one stored training sample and generate `n` (input, target)
-- patch pairs for the configured task.
--   x             compressed sample (see compression module)
--   is_validation disables the color-noise/overlay augmentations
--   n             number of patches to produce (default settings.patches)
--   offset        model border offset, shrinks the target crop
-- Fix: previously an unrecognized settings.method fell through and
-- returned nil, crashing later at `#xy` in the callers with no hint of
-- the cause; now it raises a descriptive error immediately.
local function transformer(x, is_validation, n, offset)
   x = compression.decompress(x)
   n = n or settings.patches
   if is_validation == nil then is_validation = false end
   -- NOTE(review): active cropping was configured identically for training
   -- and validation in the original (both branches read the same settings),
   -- so the assignment is hoisted; only the augmentation rates differ.
   local active_cropping_rate = settings.active_cropping_rate
   local active_cropping_tries = settings.active_cropping_tries
   local random_color_noise_rate
   local random_overlay_rate
   if is_validation then
      random_color_noise_rate = 0.0
      random_overlay_rate = 0.0
   else
      random_color_noise_rate = settings.random_color_noise_rate
      random_overlay_rate = settings.random_overlay_rate
   end
   if settings.method == "scale" then
      return pairwise_transform.scale(x,
				      settings.scale,
				      settings.crop_size, offset,
				      n,
				      {
					 downsampling_filters = settings.downsampling_filters,
					 upsampling_filter = settings.upsampling_filter,
					 random_half_rate = settings.random_half_rate,
					 random_color_noise_rate = random_color_noise_rate,
					 random_overlay_rate = random_overlay_rate,
					 random_unsharp_mask_rate = settings.random_unsharp_mask_rate,
					 max_size = settings.max_size,
					 active_cropping_rate = active_cropping_rate,
					 active_cropping_tries = active_cropping_tries,
					 rgb = (settings.color == "rgb"),
					 gamma_correction = settings.gamma_correction
				      })
   elseif settings.method == "noise" then
      return pairwise_transform.jpeg(x,
				     settings.style,
				     settings.noise_level,
				     settings.crop_size, offset,
				     n,
				     {
					random_half_rate = settings.random_half_rate,
					random_color_noise_rate = random_color_noise_rate,
					random_overlay_rate = random_overlay_rate,
					random_unsharp_mask_rate = settings.random_unsharp_mask_rate,
					max_size = settings.max_size,
					jpeg_chroma_subsampling_rate = settings.jpeg_chroma_subsampling_rate,
					active_cropping_rate = active_cropping_rate,
					active_cropping_tries = active_cropping_tries,
					nr_rate = settings.nr_rate,
					rgb = (settings.color == "rgb")
				     })
   else
      error("transformer: unknown method: " .. tostring(settings.method))
   end
end
-- Refill the preallocated patch pools `x` (inputs) and `y` (targets) in
-- place by regenerating settings.patches fresh crops from every training
-- sample. `input_size`/`target_size` are accepted for interface
-- compatibility but unused.
local function resampling(x, y, train_x, transformer, input_size, target_size)
   print("## resampling")
   local per_sample = settings.patches
   for t = 1, #train_x do
      xlua.progress(t, #train_x)
      local crops = transformer(train_x[t], false, per_sample)
      local base = (t - 1) * per_sample
      for i = 1, #crops do
         x[base + i]:copy(crops[i][1])
         y[base + i]:copy(crops[i][2])
      end
      if t % 50 == 0 then
         collectgarbage()
      end
   end
end
-- Draw the training/validation score histories as two line plots.
-- NOTE(review): `gnuplot` is not require()d in this file; presumably it is
-- loaded transitively (e.g. via w2nn) — confirm before relying on it.
local function plot(train, valid)
   local series = {
      {'training', torch.Tensor(train), '-'},
      {'validation', torch.Tensor(valid), '-'}
   }
   gnuplot.plot(series)
end
-- Main training driver. Builds the model/criterion, splits the dataset,
-- precomputes a validation set, then runs epoch x inner-epoch rounds of
-- minibatch Adam. After each inner epoch it validates with PSNR; a new
-- best score checkpoints the model (and a test image render), while a
-- stretch of non-improving rounds decays the learning rate by 0.8 down
-- to LR_MIN.
local function train()
   local psnr_train = {}
   local psnr_valid = {}
   local LR_MIN = 1.0e-5 -- floor for learning-rate decay
   local model = srcnn.create(settings.method, settings.backend, settings.color)
   local offset = reconstruct.offset_size(model)
   -- bind the model's border offset into the patch generator
   local pairwise_func = function(x, is_validation, n)
      return transformer(x, is_validation, n, offset)
   end
   local criterion = create_criterion(model)
   local eval_metric = w2nn.PSNRCriterion():cuda()
   local x = torch.load(settings.images)
   local train_x, valid_x = split_data(x, math.floor(settings.validation_rate * #x))
   local adam_config = {
      learningRate = settings.learning_rate,
      xBatchSize = settings.batch_size,
   }
   -- validations since the last improvement; drives the LR decay below
   local stagnant_count = 0
   local ch = nil
   if settings.color == "y" then
      ch = 1
   elseif settings.color == "rgb" then
      ch = 3
   end
   local best_score = 0.0
   print("# make validation-set")
   local valid_xy = make_validation_set(valid_x, pairwise_func,
					settings.validation_crops,
					settings.patches)
   valid_x = nil
   collectgarbage()
   model:cuda()
   print("load .. " .. #train_x)
   -- Preallocated patch pools, refilled in place by resampling() each epoch.
   -- Targets are stored flattened; Tensor:copy() matches by element count.
   local x = torch.Tensor(settings.patches * #train_x,
			  ch, settings.crop_size, settings.crop_size)
   local y = torch.Tensor(settings.patches * #train_x,
			  ch * (settings.crop_size - offset * 2) * (settings.crop_size - offset * 2)):zero()
   for epoch = 1, settings.epoch do
      model:training()
      print("# " .. epoch)
      resampling(x, y, train_x, pairwise_func)
      for i = 1, settings.inner_epoch do
	 local train_score = minibatch_adam(model, criterion, eval_metric, x, y, adam_config)
	 print(train_score)
	 model:evaluate()
	 print("# validation")
	 local score = validate(model, eval_metric, valid_xy, adam_config.xBatchSize)
	 table.insert(psnr_train, train_score.PSNR)
	 table.insert(psnr_valid, score)
	 if settings.plot then
	    plot(psnr_train, psnr_valid)
	 end
	 if score > best_score then
	    local test_image = image_loader.load_float(settings.test) -- reload
	    stagnant_count = 0
	    best_score = score
	    print("* update best model")
	    if settings.save_history then
	       -- keep one checkpoint per (epoch, inner-epoch) pair
	       torch.save(string.format(settings.model_file, epoch, i), model:clearState(), "ascii")
	       if settings.method == "noise" then
		  local log = path.join(settings.model_dir,
					("noise%d_best.%d-%d.png"):format(settings.noise_level,
									  epoch, i))
		  save_test_jpeg(model, test_image, log)
	       elseif settings.method == "scale" then
		  local log = path.join(settings.model_dir,
					("scale%.1f_best.%d-%d.png"):format(settings.scale,
									    epoch, i))
		  save_test_scale(model, test_image, log)
	       end
	    else
	       -- overwrite the single best-model file
	       torch.save(settings.model_file, model:clearState(), "ascii")
	       if settings.method == "noise" then
		  local log = path.join(settings.model_dir,
					("noise%d_best.png"):format(settings.noise_level))
		  save_test_jpeg(model, test_image, log)
	       elseif settings.method == "scale" then
		  local log = path.join(settings.model_dir,
					("scale%.1f_best.png"):format(settings.scale))
		  save_test_scale(model, test_image, log)
	       end
	    end
	 else
	    stagnant_count = stagnant_count + 1
	    if stagnant_count > 2 and adam_config.learningRate > LR_MIN then
	       adam_config.learningRate = adam_config.learningRate * 0.8
	       print("* learning rate decay: " .. adam_config.learningRate)
	       stagnant_count = 0
	    end
	 end
	 print("current: " .. score .. ", best: " .. best_score)
	 collectgarbage()
      end
   end
end
-- Script entry point: select the requested GPU, seed both CPU and GPU
-- RNGs for reproducibility, echo the effective settings, and train.
if settings.gpu > 0 then
   cutorch.setDevice(settings.gpu)
end
torch.manualSeed(settings.seed)
cutorch.manualSeed(settings.seed)
print(settings)
train()