require 'pl'
local __FILE__ = (function() return string.gsub(debug.getinfo(2, 'S').source, "^@", "") end)()
package.path = path.join(path.dirname(__FILE__), "lib", "?.lua;") .. package.path
require 'optim'
require 'xlua'
require 'image'
require 'w2nn'
local threads = require 'threads'
local settings = require 'settings'
local srcnn = require 'srcnn'
local minibatch_adam = require 'minibatch_adam'
local iproc = require 'iproc'
local reconstruct = require 'reconstruct'
local image_loader = require 'image_loader'
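
-- Preview helpers: run the current model over the test image and save the
-- result so training progress can be inspected visually.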
local function save_test_scale(model, rgb, file)
   local up = reconstruct.scale(model, settings.scale, rgb)
   image.save(file, up)
end
local function save_test_jpeg(model, rgb, file)
   local im, count = reconstruct.image(model, rgb)
   image.save(file, im)
end
local function save_test_user(model, rgb, file)
   if settings.scale == 1 then
      save_test_jpeg(model, rgb, file)
   else
      save_test_scale(model, rgb, file)
   end
end
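
-- Split the dataset into training and validation sets. When
-- settings.validation_filename_split is enabled, samples that share a basename
-- (crops of the same source image) always land in the same split, so the
-- validation set stays independent of the training data.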
local function split_data(x, test_size)
   if settings.validation_filename_split then
      if not (x[1][2].data and x[1][2].data.basename) then
         error("`images.t` does not have basename info. You need to re-run `convert_data.lua`.")
      end
      local basename_db = {}
      for i = 1, #x do
         local meta = x[i][2].data
         if basename_db[meta.basename] then
            table.insert(basename_db[meta.basename], x[i])
         else
            basename_db[meta.basename] = {x[i]}
         end
      end
      local basename_list = {}
      for k, v in pairs(basename_db) do
         table.insert(basename_list, v)
      end
      local index = torch.randperm(#basename_list)
      local train_x = {}
      local valid_x = {}
      local pos = 1
      for i = 1, #basename_list do
         if #valid_x >= test_size then
            break
         end
         local xs = basename_list[index[pos]]
         for j = 1, #xs do
            table.insert(valid_x, xs[j])
         end
         pos = pos + 1
      end
      for i = pos, #basename_list do
         local xs = basename_list[index[i]]
         for j = 1, #xs do
            table.insert(train_x, xs[j])
         end
      end
      return train_x, valid_x
   else
      local index = torch.randperm(#x)
      local train_size = #x - test_size
      local train_x = {}
      local valid_x = {}
      for i = 1, train_size do
         train_x[i] = x[index[i]]
      end
      for i = 1, test_size do
         valid_x[i] = x[index[train_size + i]]
      end
      return train_x, valid_x
   end
end
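
-- Shared state for the data-augmentation worker pool.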
local g_transform_pool = nil
local g_mutex = nil
local g_mutex_id = nil
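
-- Create the thread pool that generates training pairs. Each worker rebuilds
-- the runtime environment (package.path, torch/nn/cunn) and defines
-- transformer(), which converts one compressed sample into augmented
-- (input, target) patch pairs.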
local function transform_pool_init(has_resize, offset)
   local nthread = torch.getnumthreads()
   if (settings.thread > 0) then
      nthread = settings.thread
   end
   g_mutex = threads.Mutex()
   g_mutex_id = g_mutex:id()
   g_transform_pool = threads.Threads(
      nthread,
      threads.safe(
         function(threadid)
            require 'pl'
            local __FILE__ = (function() return string.gsub(debug.getinfo(2, 'S').source, "^@", "") end)()
            package.path = path.join(path.dirname(__FILE__), "lib", "?.lua;") .. package.path
            require 'torch'
            require 'nn'
            require 'cunn'

            torch.setnumthreads(1)
            torch.setdefaulttensortype("torch.FloatTensor")

            local threads = require 'threads'
            local compression = require 'compression'
            local pairwise_transform = require 'pairwise_transform'
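
            -- transformer: decompress one sample and its metadata, then
            -- generate n augmented (input, target) patch pairs according to
            -- settings.method.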
            function transformer(x, is_validation, n)
               local mutex = threads.Mutex(g_mutex_id)
               local meta = {data = {}}
               local y = nil
               if type(x) == "table" and type(x[2]) == "table" then
                  meta = x[2]
                  if x[1].x and x[1].y then
                     y = compression.decompress(x[1].y)
                     x = compression.decompress(x[1].x)
                  else
                     x = compression.decompress(x[1])
                  end
               else
                  x = compression.decompress(x)
               end
               n = n or settings.patches
               if is_validation == nil then is_validation = false end
               local random_color_noise_rate = nil
               local random_overlay_rate = nil
               local active_cropping_rate = nil
               local active_cropping_tries = nil
               if is_validation then
                  active_cropping_rate = settings.active_cropping_rate
                  active_cropping_tries = settings.active_cropping_tries
                  random_color_noise_rate = 0.0
                  random_overlay_rate = 0.0
               else
                  active_cropping_rate = settings.active_cropping_rate
                  active_cropping_tries = settings.active_cropping_tries
                  random_color_noise_rate = settings.random_color_noise_rate
                  random_overlay_rate = settings.random_overlay_rate
               end
               if settings.method == "scale" then
                  local conf = tablex.update({
                     mutex = mutex,
                     downsampling_filters = settings.downsampling_filters,
                     random_half_rate = settings.random_half_rate,
                     random_color_noise_rate = random_color_noise_rate,
                     random_overlay_rate = random_overlay_rate,
                     random_unsharp_mask_rate = settings.random_unsharp_mask_rate,
                     random_blur_rate = settings.random_blur_rate,
                     random_blur_size = settings.random_blur_size,
                     random_blur_sigma_min = settings.random_blur_sigma_min,
                     random_blur_sigma_max = settings.random_blur_sigma_max,
                     max_size = settings.max_size,
                     active_cropping_rate = active_cropping_rate,
                     active_cropping_tries = active_cropping_tries,
                     rgb = (settings.color == "rgb"),
                     x_upsampling = not has_resize,
                     resize_blur_min = settings.resize_blur_min,
                     resize_blur_max = settings.resize_blur_max}, meta)
                  return pairwise_transform.scale(x,
                                                  settings.scale,
                                                  settings.crop_size, offset,
                                                  n, conf)
               elseif settings.method == "noise" then
                  local conf = tablex.update({
                     mutex = mutex,
                     random_half_rate = settings.random_half_rate,
                     random_color_noise_rate = random_color_noise_rate,
                     random_overlay_rate = random_overlay_rate,
                     random_unsharp_mask_rate = settings.random_unsharp_mask_rate,
                     random_blur_rate = settings.random_blur_rate,
                     random_blur_size = settings.random_blur_size,
                     random_blur_sigma_min = settings.random_blur_sigma_min,
                     random_blur_sigma_max = settings.random_blur_sigma_max,
                     max_size = settings.max_size,
                     jpeg_chroma_subsampling_rate = settings.jpeg_chroma_subsampling_rate,
                     active_cropping_rate = active_cropping_rate,
                     active_cropping_tries = active_cropping_tries,
                     nr_rate = settings.nr_rate,
                     rgb = (settings.color == "rgb")}, meta)
                  return pairwise_transform.jpeg(x,
                                                 settings.style,
                                                 settings.noise_level,
                                                 settings.crop_size, offset,
                                                 n, conf)
               elseif settings.method == "noise_scale" then
                  local conf = tablex.update({
                     mutex = mutex,
                     downsampling_filters = settings.downsampling_filters,
                     random_half_rate = settings.random_half_rate,
                     random_color_noise_rate = random_color_noise_rate,
                     random_overlay_rate = random_overlay_rate,
                     random_unsharp_mask_rate = settings.random_unsharp_mask_rate,
                     random_blur_rate = settings.random_blur_rate,
                     random_blur_size = settings.random_blur_size,
                     random_blur_sigma_min = settings.random_blur_sigma_min,
                     random_blur_sigma_max = settings.random_blur_sigma_max,
                     max_size = settings.max_size,
                     jpeg_chroma_subsampling_rate = settings.jpeg_chroma_subsampling_rate,
                     nr_rate = settings.nr_rate,
                     active_cropping_rate = active_cropping_rate,
                     active_cropping_tries = active_cropping_tries,
                     rgb = (settings.color == "rgb"),
                     x_upsampling = not has_resize,
                     resize_blur_min = settings.resize_blur_min,
                     resize_blur_max = settings.resize_blur_max}, meta)
                  return pairwise_transform.jpeg_scale(x,
                                                       settings.scale,
                                                       settings.style,
                                                       settings.noise_level,
                                                       settings.crop_size, offset,
                                                       n, conf)
               elseif settings.method == "user" then
                  local random_erasing_rate = 0
                  local random_erasing_n = 0
                  local random_erasing_rect_min = 0
                  local random_erasing_rect_max = 0
                  if not is_validation then
                     random_erasing_rate = settings.random_erasing_rate
                     random_erasing_n = settings.random_erasing_n
                     random_erasing_rect_min = settings.random_erasing_rect_min
                     random_erasing_rect_max = settings.random_erasing_rect_max
                  end
                  local conf = tablex.update({
                     gcn = settings.gcn,
                     max_size = settings.max_size,
                     active_cropping_rate = active_cropping_rate,
                     active_cropping_tries = active_cropping_tries,
                     random_pairwise_rotate_rate = settings.random_pairwise_rotate_rate,
                     random_pairwise_rotate_min = settings.random_pairwise_rotate_min,
                     random_pairwise_rotate_max = settings.random_pairwise_rotate_max,
                     random_pairwise_scale_rate = settings.random_pairwise_scale_rate,
                     random_pairwise_scale_min = settings.random_pairwise_scale_min,
                     random_pairwise_scale_max = settings.random_pairwise_scale_max,
                     random_pairwise_negate_rate = settings.random_pairwise_negate_rate,
                     random_pairwise_negate_x_rate = settings.random_pairwise_negate_x_rate,
                     pairwise_y_binary = settings.pairwise_y_binary,
                     pairwise_flip = settings.pairwise_flip,
                     random_erasing_rate = random_erasing_rate,
                     random_erasing_n = random_erasing_n,
                     random_erasing_rect_min = random_erasing_rect_min,
                     random_erasing_rect_max = random_erasing_rect_max,
                     rgb = (settings.color == "rgb")}, meta)
                  return pairwise_transform.user(x, y,
                                                 settings.crop_size, offset,
                                                 n, conf)
               end
            end
         end)
   )
   g_transform_pool:synchronize()
end
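
-- Build the validation set once up front: for each validation image, generate
-- a fixed number of patch pairs so every epoch is evaluated on the same data.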
local function make_validation_set(x, n, patches)
   local nthread = torch.getnumthreads()
   if (settings.thread > 0) then
      nthread = settings.thread
   end
   n = n or 4
   local validation_patches = math.min(16, patches or 16)
   local data = {}

   g_transform_pool:synchronize()
   torch.setnumthreads(1) -- the worker pool does the parallel work
   for i = 1, #x do
      for k = 1, math.max(n / validation_patches, 1) do
         local input = x[i]
         g_transform_pool:addjob(
            function()
               local xy = transformer(input, true, validation_patches)
               return xy
            end,
            function(xy)
               for j = 1, #xy do
                  table.insert(data, {x = xy[j][1], y = xy[j][2]})
               end
            end
         )
      end
      if i % 20 == 0 then
         collectgarbage()
         g_transform_pool:synchronize()
         xlua.progress(i, #x)
      end
   end
   g_transform_pool:synchronize()
   torch.setnumthreads(nthread) -- revert

   -- shuffle so that validation batches are not grouped by source image
   local new_data = {}
   local perm = torch.randperm(#data)
   for i = 1, perm:size(1) do
      new_data[i] = data[perm[i]]
   end
   data = new_data
   return data
end
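
-- Evaluate the model on the validation set, accumulating the training loss,
-- MSE, and a batch-wise PSNR estimate.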
local function validate(model, criterion, eval_metric, data, batch_size)
   local psnr = 0
   local loss = 0
   local mse = 0
   local loss_count = 0
   local inputs_tmp = torch.Tensor(batch_size,
                                   data[1].x:size(1),
                                   data[1].x:size(2),
                                   data[1].x:size(3)):zero()
   local targets_tmp = torch.Tensor(batch_size,
                                    data[1].y:size(1),
                                    data[1].y:size(2),
                                    data[1].y:size(3)):zero()
   local inputs = inputs_tmp:clone():cuda()
   local targets = targets_tmp:clone():cuda()
   for t = 1, #data, batch_size do
      if t + batch_size - 1 > #data then
         break
      end
      for i = 1, batch_size do
         inputs_tmp[i]:copy(data[t + i - 1].x)
         targets_tmp[i]:copy(data[t + i - 1].y)
      end
      inputs:copy(inputs_tmp)
      targets:copy(targets_tmp)
      local z = model:forward(inputs)
      local batch_mse = eval_metric:forward(z, targets)
      loss = loss + criterion:forward(z, targets)
      mse = mse + batch_mse
      psnr = psnr + (10 * math.log10(1 / (batch_mse + 1.0e-6)))
      loss_count = loss_count + 1
      if loss_count % 10 == 0 then
         xlua.progress(t, #data)
         collectgarbage()
      end
   end
   xlua.progress(#data, #data)
   return {loss = loss / loss_count, MSE = mse / loss_count, PSNR = psnr / loss_count}
end
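
-- Build the training criterion selected by settings.loss. For "huber" on RGB
-- models, per-channel weights follow the BT.601 luma coefficients.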
local function create_criterion(model)
   if settings.loss == "huber" then
      if reconstruct.is_rgb(model) then
         local offset = reconstruct.offset_size(model)
         local output_w = settings.crop_size - offset * 2
         local weight = torch.Tensor(3, output_w * output_w)
         weight[1]:fill(0.29891 * 3) -- R
         weight[2]:fill(0.58661 * 3) -- G
         weight[3]:fill(0.11448 * 3) -- B
         return w2nn.ClippedWeightedHuberCriterion(weight, 0.1, {0.0, 1.0}):cuda()
      else
         local offset = reconstruct.offset_size(model)
         local output_w = settings.crop_size - offset * 2
         local weight = torch.Tensor(1, output_w * output_w)
         weight[1]:fill(1.0)
         return w2nn.ClippedWeightedHuberCriterion(weight, 0.1, {0.0, 1.0}):cuda()
      end
   elseif settings.loss == "l1" then
      return w2nn.L1Criterion():cuda()
   elseif settings.loss == "mse" then
      return w2nn.ClippedMSECriterion(0, 1.0):cuda()
   elseif settings.loss == "bce" then
      local bce = nn.BCECriterion()
      bce.sizeAverage = true
      return bce:cuda()
   else
      error("unsupported loss: " .. settings.loss)
   end
end
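
-- Refill the preallocated x/y tensors with freshly augmented patches drawn
-- from a shuffled pass over the training images.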
local function resampling(x, y, train_x)
   local c = 1
   local shuffle = torch.randperm(#train_x)
   local nthread = torch.getnumthreads()
   if (settings.thread > 0) then
      nthread = settings.thread
   end
   torch.setnumthreads(1) -- the worker pool does the parallel work
   for t = 1, #train_x do
      local input = train_x[shuffle[t]]
      g_transform_pool:addjob(
         function()
            local xy = transformer(input, false, settings.patches)
            return xy
         end,
         function(xy)
            for i = 1, #xy do
               if c <= x:size(1) then
                  x[c]:copy(xy[i][1])
                  y[c]:copy(xy[i][2])
                  c = c + 1
               else
                  break
               end
            end
         end
      )
      if t % 50 == 0 then
         collectgarbage()
         g_transform_pool:synchronize()
         xlua.progress(t, #train_x)
      end
      if c > x:size(1) then
         break
      end
   end
   g_transform_pool:synchronize()
   xlua.progress(#train_x, #train_x)
   torch.setnumthreads(nthread) -- revert
end
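
-- Hard-example mining: pick the k patches with the highest per-instance loss,
-- then randomly keep `samples` of them for the next epoch.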
local function get_oracle_data(x, y, instance_loss, k, samples)
   local index = torch.LongTensor(instance_loss:size(1))
   local dummy = torch.Tensor(instance_loss:size(1))
   torch.topk(dummy, index, instance_loss, k, 1, true)
   print("MSE of all data: " .. instance_loss:mean() .. ", MSE of oracle data: " .. dummy:mean())
   local shuffle = torch.randperm(k)
   local x_s = x:size()
   local y_s = y:size()
   x_s[1] = samples
   y_s[1] = samples
   local oracle_x = torch.Tensor(table.unpack(torch.totable(x_s)))
   local oracle_y = torch.Tensor(table.unpack(torch.totable(y_s)))

   for i = 1, samples do
      oracle_x[i]:copy(x[index[shuffle[i]]])
      oracle_y[i]:copy(y[index[shuffle[i]]])
   end
   return oracle_x, oracle_y
end
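
-- Drop images that are too small to yield a crop_size patch (with margin)
-- after downscaling by settings.scale.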
local function remove_small_image(x)
   local compression = require 'compression'
   local new_x = {}
   for i = 1, #x do
      local xe, meta, x_s
      xe = x[i]
      -- the per-element check must inspect xe, not the whole dataset table x
      if type(xe) == "table" and type(xe[2]) == "table" then
         if xe[1].x and xe[1].y then
            x_s = compression.size(xe[1].y) -- y size
         else
            x_s = compression.size(xe[1])
         end
      else
         x_s = compression.size(xe)
      end
      if x_s[2] / settings.scale > settings.crop_size + 32 and
         x_s[3] / settings.scale > settings.crop_size + 32 then
         table.insert(new_x, x[i])
      end
      if i % 100 == 0 then
         collectgarbage()
      end
   end
   print(string.format("%d small images removed", #x - #new_x))

   return new_x
end
local function plot(train, valid)
   gnuplot.plot({
      {'training', torch.Tensor(train), '-'},
      {'validation', torch.Tensor(valid), '-'}})
end
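
-- Main training loop: split the data, build the model and criterion, then for
-- each epoch resample training patches, run minibatch Adam, validate, and save
-- the model whenever the validation score improves.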
local function train()
   local x = torch.load(settings.images)
   if settings.method ~= "user" then
      x = remove_small_image(x)
   end
   local train_x, valid_x = split_data(x, math.max(math.floor(settings.validation_rate * #x), 1))
   local hist_train = {}
   local hist_valid = {}
   local model
   if settings.resume:len() > 0 then
      model = torch.load(settings.resume, "ascii")
   else
      if stringx.endswith(settings.model, ".lua") then
         local create_model = dofile(settings.model)
         model = create_model(srcnn, settings)
      else
         model = srcnn.create(settings.model, settings.backend, settings.color)
      end
   end
   if model.w2nn_input_size then
      if settings.crop_size ~= model.w2nn_input_size then
         io.stderr:write(string.format("warning: crop_size is replaced with %d\n",
                                       model.w2nn_input_size))
         settings.crop_size = model.w2nn_input_size
      end
   end
   if model.w2nn_gcn then
      settings.gcn = true
   else
      settings.gcn = false
   end
   dir.makepath(settings.model_dir)

   local offset = reconstruct.offset_size(model)
   transform_pool_init(reconstruct.has_resize(model), offset)

   local criterion = create_criterion(model)
   local eval_metric = w2nn.ClippedMSECriterion(0, 1):cuda()
   local adam_config = {
      xLearningRate = settings.learning_rate,
      xBatchSize = settings.batch_size,
      xLearningRateDecay = settings.learning_rate_decay,
      xInstanceLoss = (settings.oracle_rate > 0)
   }
   local ch = nil
   if settings.color == "y" then
      ch = 1
   elseif settings.color == "rgb" then
      ch = 3
   end
   local best_score = 1000.0
   print("# make validation-set")
   local valid_xy = make_validation_set(valid_x,
                                        settings.validation_crops,
                                        settings.patches)
   valid_x = nil

   collectgarbage()
   model:cuda()
   print("load " .. #train_x .. " training images")
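
   -- Preallocate the training tensors for all patches. y is stored flat
   -- (ch * output^2); models with a built-in upsampler (has_resize) take
   -- inputs at 1/scale of the crop size.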
   local x = nil
   local y = torch.Tensor(settings.patches * #train_x,
                          ch * (settings.crop_size - offset * 2) * (settings.crop_size - offset * 2)):zero()
   if reconstruct.has_resize(model) then
      x = torch.Tensor(settings.patches * #train_x,
                       ch, settings.crop_size / settings.scale, settings.crop_size / settings.scale)
   else
      x = torch.Tensor(settings.patches * #train_x,
                       ch, settings.crop_size, settings.crop_size)
   end
   local instance_loss = nil
   local pmodel = w2nn.data_parallel(model, settings.gpu)
   for epoch = 1, settings.epoch do
      pmodel:training()
      print("# " .. epoch)
      if adam_config.learningRate then
         print("learning rate: " .. adam_config.learningRate)
      end
      print("## resampling")
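      -- After the first epoch, instance_loss holds the per-patch loss from the
      -- last pass; use it for hard-example (oracle) resampling.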
      if instance_loss then
         -- active learning
         local oracle_k = math.min(x:size(1) * (settings.oracle_rate * (1 / (1 - settings.oracle_drop_rate))), x:size(1))
         local oracle_n = math.min(x:size(1) * settings.oracle_rate, x:size(1))
         if oracle_n > 0 then
            local oracle_x, oracle_y = get_oracle_data(x, y, instance_loss, oracle_k, oracle_n)
            resampling(x:narrow(1, oracle_x:size(1) + 1, x:size(1) - oracle_x:size(1)),
                       y:narrow(1, oracle_x:size(1) + 1, x:size(1) - oracle_x:size(1)), train_x)
            x:narrow(1, 1, oracle_x:size(1)):copy(oracle_x)
            y:narrow(1, 1, oracle_y:size(1)):copy(oracle_y)

            local draw_n = math.floor(math.sqrt(oracle_x:size(1)))
            if draw_n > 100 then
               draw_n = 100
            end
            image.save(path.join(settings.model_dir, "oracle_x.png"),
                       image.toDisplayTensor({
                          input = oracle_x:narrow(1, 1, draw_n * draw_n),
                          padding = 2,
                          nrow = draw_n,
                          min = 0,
                          max = 1}))
         else
            resampling(x, y, train_x)
         end
      else
         resampling(x, y, train_x)
      end
      collectgarbage()
      instance_loss = torch.Tensor(x:size(1)):zero()

      for i = 1, settings.inner_epoch do
         pmodel:training()
         local train_score, il = minibatch_adam(pmodel, criterion, eval_metric, x, y, adam_config)
         instance_loss:copy(il)
         print(train_score)
         pmodel:evaluate()
         print("# validation")
         local score = validate(pmodel, criterion, eval_metric, valid_xy, adam_config.xBatchSize)
         table.insert(hist_train, train_score.loss)
         table.insert(hist_valid, score.loss)
         if settings.plot then
            plot(hist_train, hist_valid)
         end
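         -- Model selection: compare on MSE or training loss depending on
         -- settings.update_criterion; lower is better.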
         local score_for_update
         if settings.update_criterion == "mse" then
            score_for_update = score.MSE
         else
            score_for_update = score.loss
         end
         if score_for_update < best_score then
            local test_image = image_loader.load_float(settings.test) -- reload
            best_score = score_for_update
            print("* model updated")
            if settings.save_history then
               pmodel:clearState()
               torch.save(settings.model_file_best, model, "ascii")
               torch.save(string.format(settings.model_file, epoch, i), model, "ascii")
               if settings.method == "noise" then
                  local log = path.join(settings.model_dir,
                                        ("noise%d_best.%d-%d.png"):format(settings.noise_level,
                                                                          epoch, i))
                  save_test_jpeg(model, test_image, log)
               elseif settings.method == "scale" then
                  local log = path.join(settings.model_dir,
                                        ("scale%.1f_best.%d-%d.png"):format(settings.scale,
                                                                            epoch, i))
                  save_test_scale(model, test_image, log)
               elseif settings.method == "noise_scale" then
                  local log = path.join(settings.model_dir,
                                        ("noise%d_scale%.1f_best.%d-%d.png"):format(settings.noise_level,
                                                                                    settings.scale,
                                                                                    epoch, i))
                  save_test_scale(model, test_image, log)
               elseif settings.method == "user" then
                  local log = path.join(settings.model_dir,
                                        ("%s_best.%d-%d.png"):format(settings.name,
                                                                     epoch, i))
                  save_test_user(model, test_image, log)
               end
            else
               pmodel:clearState()
               torch.save(settings.model_file, model, "ascii")
               if settings.method == "noise" then
                  local log = path.join(settings.model_dir,
                                        ("noise%d_best.png"):format(settings.noise_level))
                  save_test_jpeg(model, test_image, log)
               elseif settings.method == "scale" then
                  local log = path.join(settings.model_dir,
                                        ("scale%.1f_best.png"):format(settings.scale))
                  save_test_scale(model, test_image, log)
               elseif settings.method == "noise_scale" then
                  local log = path.join(settings.model_dir,
                                        ("noise%d_scale%.1f_best.png"):format(settings.noise_level,
                                                                              settings.scale))
                  save_test_scale(model, test_image, log)
               elseif settings.method == "user" then
                  local log = path.join(settings.model_dir,
                                        ("%s_best.png"):format(settings.name))
                  save_test_user(model, test_image, log)
               end
            end
         end
         print("Batch-wise PSNR: " .. score.PSNR .. ", loss: " .. score.loss .. ", MSE: " .. score.MSE .. ", best: " .. best_score)
         collectgarbage()
      end
   end
end

torch.manualSeed(settings.seed)
cutorch.manualSeed(settings.seed)
print(settings)
train()
|