waifu2x/models/anime_style_art/noise2_model.t7

1728 lines · 5 MiB
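The remainder of the page is the raw Torch7 ASCII serialization of the noise2 denoising model: an nn.Sequential that chains nn.SpatialConvolutionMM layers (3×3 kernels, stride 1, no padding) with w2nn.LeakyReLU activations (negative_scale 0.1), with weights stored as torch.CudaTensor. As a minimal sketch (not the project's own loader), the model could be loaded and inspected from Lua roughly as follows, assuming a CUDA-capable Torch7 install with nn and cunn and waifu2x's w2nn module on the package path; the require and file paths below are illustrative:

-- Sketch: load the ASCII-serialized model and print its layer listing.
-- cunn is assumed because the stored weights are torch.CudaTensor, and
-- w2nn is assumed because the file references the custom w2nn.LeakyReLU class.
require 'torch'
require 'nn'
require 'cunn'
require 'w2nn'  -- assumption: exact require path depends on the waifu2x checkout

local model = torch.load('waifu2x/models/anime_style_art/noise2_model.t7', 'ascii')
print(model)        -- prints the nn.Sequential module tree
model:evaluate()    -- switch off training-mode behaviour before inference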
4
1
3
V 1
13
nn.Sequential
3
2
3
2
9
gradInput
4
3
3
V 1
16
torch.CudaTensor
0
1
0
2
7
modules
3
4
14
1
1
4
5
3
V 1
23
nn.SpatialConvolutionMM
3
6
17
2
4
padW
1
0
2
11
nInputPlane
1
1
2
6
output
4
7
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
8
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
9
3
V 1
16
torch.CudaTensor
1
32
1
1
4
10
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
32
2
4
bias
4
11
3
V 1
16
torch.CudaTensor
1
32
1
1
4
12
3
V 1
17
torch.CudaStorage
32
-0.0077224751 -0.0100613907 -0.00884213299 -0.0384441912 0.0343856439 0.00808441825 0.0131392581 0.0143000046 -0.0101159262 -0.0719366595 -0.0194344521 0.00277963676 0.00797075685 0.0104597742 0.00090173108 -0.00624251971 0.0051631541 -0.0148980571 -0.0160721615 -0.00410585711 -0.00570176728 -0.0169781409 -0.00498746103 -0.0342289992 -0.00834816415 0.00497037871 -0.0297133345 0.0381713174 0.00269163516 0.000838130538 -0.0235808715 0.00315397442
2
2
kH
1
3
2
6
finput
4
13
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
14
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
15
3
V 1
17
torch.CudaStorage
288
0.0260251593 0.0745829716 0.143579707 -0.0187859032 -0.0412778929 0.0548107959 -0.0433426499 -0.172254533 -0.0235303883 0.0193158612 -0.0823137611 -0.00372310751 0.170975477 0.00795254949 -0.15225175 0.0209624004 0.0339416638 -0.0126146777 0.0364503749 -0.185733348 -0.021553142 -0.14045088 0.0560350604 0.0633481145 0.105203807 -0.00372753805 0.0932094678 -0.0867254809 0.0623910353 0.106498845 -0.0823153332 -0.092243202 0.210309997 0.025555389 0.180061758 0.157496914 -0.0585936941 0.0986851603 -0.0106168641 0.00141309341 -0.00631248392 -0.191497639 0.0264256522 -0.070744209 -0.0284198094 0.0400808677 0.0108744847 0.0380265601 0.0771037936 0.0887810662 0.0180303324 -0.02467384 -0.0783688799 -0.160539582 -0.0610968545 -0.0595726222 0.119749583 0.029085964 -0.0362342075 0.0268222783 -0.0838028938 0.0173692517 0.0441942364 -0.0410679132 0.00889093708 -0.138424531 -0.00747291185 0.00530844927 -0.0686086789 -0.030433869 0.078178674 -0.0184055958 0.0894086957 -0.0156334899 -0.0657583475 0.0582988746 -0.0089572873 0.0459240675 0.0108531527 -0.0757706538 -0.00673243636 0.000669501431 -0.0406758599 0.0214612763 0.0779650882 0.0327601768 -0.0169277601 0.00892890058 0.000875368074 -0.00258530281 -0.0189723577 0.126015127 0.0317906849 -0.0493143275 0.176733181 -0.118218392 0.0249404442 -0.0766738951 -0.0926598534 -0.03294494 -0.01195182 -0.0125941932 -0.0595388822 0.0871999413 0.0143969478 0.0551179163 -0.209275812 0.0688750669 -0.00800564233 -0.0657479987 0.229091421 0.0247057825 0.0346916243 -0.00256070937 -0.0163935926 -0.125149891 -0.0668325126 0.0440958515 -0.0715469271 -0.200888664 -0.000241854214 -0.0831552669 0.0760017782 -0.018941151 -0.102589227 0.0530684739 -0.0441872329 0.0636526048 -0.00614678394 -0.0237664264 0.0322683677 -0.0313097425 -0.0164506361 -0.0589051247 0.0187035426 -0.0116877481 0.00796504878 0.0116545185 0.0456568114 0.0146543058 -0.0136260018 0.00620803842 0.0123518743 -0.0835248753 0.0534036383 0.0119372588 -0.00672791991 0.0172390193 -0.125857234 -0.121789746 -0.00362337613 0.0418042243 -0.0203668326 0.0282676928 0.0373939797 0.0184210055 0.0474318676 0.0566503592 0.0183051713 -0.0987664089 -0.0934745297 -0.00905859284 -0.0247125588 -0.0395164117 -0.0214939099 -0.00620404864 -0.0446823537 0.0191385876 -0.0030956578 0.0459739827 0.079354614 0.129028037 0.0274710543 -0.126707137 0.0466180444 -0.0497252308 -0.025381444 0.00540154427 0.0925125033 -0.0994461477 0.0208308995 -0.0145517038 -0.0207707565 -0.0466409586 0.0584712215 0.0373148471 0.0353013836 -0.0528791584 -0.0158727672 0.0193663817 -0.0446434356 0.0320010334 0.0180916693 0.0867382511 0.0138433333 0.000483575132 -0.0530113503 -0.0722895861 -0.117657095 -0.0848281458 -0.0392626747 0.0922843292 0.0610473529 0.103356101 0.101816282 -0.0927831456 -0.0264362786 0.163280144 -0.133148193 0.0602375865 0.0447090007 0.0314949825 -0.0196663998 0.0743949637 0.047267139 0.0305514336 -0.0240699165 0.0438754484 -0.00308887754 -0.0588508062 0.103088133 0.0490265191 -0.0850914195 -0.0575501844 0.0295081753 0.0913989022 -0.0570686162 -0.0889903978 -0.0564842932 -0.0470725708 0.0740050673 -0.0301447883 -0.0902494788 0.000917112804 0.127016291 -0.0820216462 -0.0752067044 0.0629544258 -0.00230554887 0.127563953 -0.167058349 -0.0115508204 0.0504230261 -0.0112417052 -0.0244162045 -0.189017788 -0.14860712 0.0175265949 -0.0393972471 0.0216171052 -0.0501232259 -0.0682652593 -0.136124089 0.0380899124 -0.12358202 -0.052738212 0.0641552433 -0.02292284 0.116487443 -0.087376684 0.0232859887 -0.0768871531 0.131416008 0.117366798 -0.0896556526 
-0.0433032066 -0.0892350674 0.0620917715 0.0299078785 -0.0419336408 -0.0318357535 0.0433624722 0.0587467588 -0.044480484 -0.00528883003 0.0409890078 -0.116329253 0.0403897613 0.0721898377 0.0105393352 -0.0484673381 -0.000530358928 0.0514967814 0.0163382869 -0.135569245 -0.0254776701 0.000854601152 0.0254341457
2
5
train
5
0
2
10
gradWeight
4
16
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
17
3
V 1
17
torch.CudaStorage
288
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 
0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
10
fgradInput
4
18
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
2
4
19
3
V 1
14
w2nn.LeakyReLU
3
20
5
2
5
train
5
0
2
8
negative
4
21
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
22
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
23
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
3
4
24
3
V 1
23
nn.SpatialConvolutionMM
3
25
17
2
4
padW
1
0
2
11
nInputPlane
1
32
2
6
output
4
26
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
27
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
28
3
V 1
16
torch.CudaTensor
1
32
1
1
4
29
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
32
2
4
bias
4
30
3
V 1
16
torch.CudaTensor
1
32
1
1
4
31
3
V 1
17
torch.CudaStorage
2016-01-23 13:36:54 +13:00
32
-0.00145941356 -0.000539173721 -0.0307535585 -0.00224311952 -0.00383721408 -0.0825701058 0.0201924667 0.00448255148 0.0417693853 -0.0115849292 0.00336479093 -0.0113627827 -0.015486815 -0.00589053426 0.0233386569 -0.0121470438 -0.0286187455 -0.00113070721 0.00499298703 -0.000996498042 0.0160875395 -0.0243052356 -0.00329795131 0.0392001383 -0.0321910791 -0.00639945921 0.00553135853 -0.0177684911 0.0115517573 0.00756824715 0.000522688439 -0.00378062786
2
2
kH
1
3
2
6
finput
4
32
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
33
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
34
3
V 1
17
torch.CudaStorage
9216
0.0546612665 -0.12151204 -0.0205522366 0.103685677 -0.00985156186 -0.00924555119 0.121930309 0.0645686164 0.0989786536 0.215004116 0.120206848 -0.0452936441 -0.0756870061 -0.034404058 0.058803577 0.0195781812 -0.107475773 0.00714681437 0.0824128985 0.0340542309 -0.0703920946 0.00711558247 -0.0604393892 -0.0486777835 -0.00235162815 -0.000671164482 0.0943582058 0.0339580476 -0.000262555375 0.117623441 -0.0435152911 -0.07536681 0.00352260028 -0.00967753865 0.0149087803 -0.020468358 0.0207525361 -0.0220080726 0.016432697 -0.0401086956 0.0333826244 -0.0116936108 0.134052292 -0.0192072298 -0.033394888 -0.0963615105 -0.156275019 0.04278633 -0.124405667 -0.0417402461 0.0447328687 0.0487137213 -0.0522516333 0.0982529372 0.0236491207 0.00921131298 -0.0779644772 -0.0220569391 -0.0565121658 0.0165031552 0.00131844415 -0.0742443055 0.0174157154 -0.0664419904 0.0234518852 0.00758816488 -0.0313935503 0.157302514 0.0890828446 0.00714784395 0.0397556573 -0.114795312 0.0823193416 0.0601362288 0.0945518687 -0.0506136902 -0.0164083838 -0.0563913547 0.0844165385 -0.0641263723 0.00834929291 -0.0603439398 0.0162124988 0.0802495331 0.0555056557 -0.00876682717 -0.00591406552 -0.126263037 -0.133730933 0.109703168 -0.0623056963 -0.0461386666 0.0840483755 0.0521250516 0.0531448126 -0.00199379167 -0.032950338 -0.0630072802 -0.140891463 0.0755667537 0.00202604174 -0.0893595368 0.0871264338 0.0319988169 -0.00948976167 0.108593032 0.00933161285 0.155044496 0.185951427 0.0719486028 -0.0438344814 0.0465134121 0.063969031 -0.0346452631 -0.0934555084 0.0118795447 -0.0457845554 -0.00407727296 -0.109156668 -0.0311918091 -0.0391744599 -0.0699257776 -0.0430241115 -0.05536009 0.0180289615 -0.0108181322 -0.124018975 -0.0562947094 0.00604107464 -0.111118875 -0.0920431018 0.0307280701 0.039173007 0.103384562 -0.0231968276 -0.104369141 -0.00408382807 -0.085862644 -0.0687780902 0.0186891742 0.00964054465 -0.046194151 0.0785055608 -0.0829831958 -0.215021595 -0.0161835551 0.0684530735 -0.00397791993 -0.103905521 -0.12822853 -0.0670190528 0.0560827218 0.098279722 0.0175518449 -0.0735762045 -0.0419748873 0.115251534 0.130230501 -0.0143355727 0.0113439336 -0.0901238844 0.0348248258 -0.12776874 0.0118176173 -0.0762690678 -0.0310491994 -0.0793473125 -0.073557049 -0.110445887 -0.0379198827 -0.00983666908 0.0689923763 -0.0175791383 0.0130967265 -0.044388257 0.112117097 0.0502131693 0.090468213 0.0210477039 -0.00484296959 0.0205301717 0.0513997674 0.0988494009 0.0222350936 0.130495712 0.00522916717 0.104085602 0.00933618937 0.0727251992 -0.119064651 -0.158151075 0.041471608 0.100581877 -0.00084185577 -0.00642960286 0.128108874 -0.101629831 -0.0143447816 -0.0619419143 0.0338814333 -0.0859425813 0.0179520268 -0.0163095184 0.138775587 0.0279007126 0.206286609 -0.034594316 -0.158030793 0.00145929365 0.0352729969 -0.0237044953 0.0137672909 -0.0850208327 0.110612608 -0.126135275 0.184046134 0.0477479622 -0.136482686 -0.144653648 0.035633076 0.133674294 -0.0384864211 0.0452219658 -0.0974880084 -0.0681634247 -0.00284088682 0.00900435168 -0.176454186 -0.078948468 -0.00797786936 -0.0642475411 0.219617322 0.0665052012 -0.072903052 0.135461167 0.0725894347 -0.148103297 0.180077806 -0.101463802 0.060536176 0.112662122 -0.0643744171 0.100498185 0.0688295886 -0.168811247 0.182976097 -0.133528352 -0.0248139668 -0.0670694262 -0.0968195722 0.176503509 0.120497145 -0.0438910946 -0.0641157404 0.000705019396 -0.0259120502 -0.0243448112 0.00536731211 -0.0168900862 0.0225797351 -0.0766674057 0.0720331967 0.117290363 -0.0799684078 0.138120145 0.00161905319 
0.0314797275 -0.0250687469 0.0392634869 0.0399589613 -0.0854346007 -0.0968454257 0.019512495 -0.0585801676 0.0454470329 -0.12701267 -0.0482783504 0.162474468 0.0309739038 0.149209723 0.0767331943 0.038104862 -0.13191326 0.123281293 -0.0539599545 -0.0543979891 0.10078904 -0.0711200535 -0.0478514992 -0.117379189 -0.142752185 0.0449396111 -0.236615151 -0.159024552 -0.0676752329 -0.0931117609 0.0959341675 0.0479184948 0.128128216 -0.0206191484 -0.0221192706 0.00822796207 -0.127206892 -0.0546756387 -0.0671735853 -0.0950806662 0.00429423759 0.087523356
2
5
train
5
0
2
10
gradWeight
4
35
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
36
3
V 1
17
torch.CudaStorage
9216
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
37
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
4
4
38
3
V 1
14
w2nn.LeakyReLU
3
39
5
2
5
train
5
0
2
8
negative
4
40
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
41
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
42
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
5
4
43
3
V 1
23
nn.SpatialConvolutionMM
3
44
17
2
4
padW
1
0
2
11
nInputPlane
1
32
2
6
output
4
45
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
46
3
V 1
16
torch.CudaTensor
0
1
0
2
2016-01-23 13:36:54 +13:00
8
gradBias
4
47
3
V 1
16
torch.CudaTensor
1
64
1
1
4
48
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
64
2
4
bias
4
49
3
V 1
16
torch.CudaTensor
1
64
1
1
4
50
3
V 1
17
torch.CudaStorage
64
-0.000227600278 -0.0119677987 -0.00608434621 0.00520217652 -0.00390527397 0.000446480641 -0.00329284719 -0.0227060672 -0.00648227101 -0.0235811751 -0.00790384877 -0.00804865826 0.000558204018 -0.0241491795 -0.00853541307 0.00815360993 -0.00471497932 -0.00387768471 9.14330667e-05 -0.00624698261 -0.000702230784 -0.00146002125 -0.000813503924 -0.00257989974 -0.0141970571 -0.0200837851 -0.0143373627 -0.00946459174 -0.0119284401 -0.0319841616 -0.00170139864 0.0109434677 -0.0053874189 0.0102654938 -0.000499731628 -0.00611921307 0.00194280781 -0.00540269166 -0.00259791687 0.00078501465 0.00426529767 -0.00627872068 -0.00402597059 0.00186929957 -0.00699274987 0.00144044368 -0.00247015664 -0.00153243926 -0.00948763173 0.0100434069 -0.0214421898 0.00866081566 -0.00689359196 0.00475455262 -0.0015283518 0.00923243351 -0.0086256275 0.0239374638 -0.0176504813 0.00107536162 0.00634855684 -0.00736878952 0.00781062664 -0.00130552566
2
2
kH
1
3
2
6
finput
4
51
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
52
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
53
3
V 1
17
torch.CudaStorage
18432
-0.0568542108 -0.0407191291 -0.0579490215 -0.0578408726 0.0627383292 -0.0879044235 0.0641862527 -0.0683745742 -0.0548529029 0.00330359815 0.0758063197 -0.0655748472 0.060284391 -0.0481931046 -0.0651315525 0.0429789722 -0.0201645643 0.0373333767 -0.0619513541 0.0481677726 0.00751661509 -0.0192767344 -0.0174798854 0.0615493655 0.0571241453 0.0346719772 0.00544471201 0.0339874551 -0.11314664 0.0238234084 -0.0653112158 -0.00693013286 0.00158673769 -0.0128733208 -0.0764232576 -0.0242468249 0.0256233737 0.0614752024 0.0609976985 -0.0780516267 -0.0482053421 -0.0468244031 -0.0377164669 0.0057081324 -0.0381994583 0.112698637 -0.0211497918 0.0132541833 -0.0112925814 0.0245090444 0.0895115212 0.0601853915 0.0106723551 -0.00409021787 -0.00183115446 -0.00978236087 0.00588978175 -0.0875282139 -0.0112635652 -0.117010407 0.0120108631 0.0282696001 -0.0334488824 -0.0166215878 0.0106464513 -0.00911552086 0.0641468465 -0.0747712255 0.0456762202 0.0194180682 -0.0138715729 -0.0938898101 -0.00653101457 -0.132246703 0.0282580983 -0.0470835492 0.0468445458 -0.0453175493 -0.0105716866 0.000160059892 0.0755793825 0.0558203235 -0.104302555 0.0505486503 -0.141337574 0.158798009 -0.00629897229 0.0371667445 -0.0287929829 0.0162832271 0.0122930957 -0.118754707 0.0511490256 -0.0360558406 0.0207832679 -0.101479225 -0.0323796086 -0.00786712859 0.0230701882 0.0540106222 -0.0103587126 -0.0152751068 0.00119669689 -0.122326359 -0.018948568 0.00424227864 -0.0298424102 0.0432257466 0.0142105762 -0.0254127551 -0.0278275628 -0.0852186307 0.0442012101 0.0526160859 0.0184289236 -0.012854903 -0.0559329242 -0.0905006453 0.0182580967 -0.0509408265 0.116172679 -0.0702174008 0.0241486169 -0.0229210127 -0.024968056 0.00102901703 -0.000142003439 0.111923426 -0.065313302 0.0620783567 0.00939619634 -0.0583149418 -0.0154347355 0.0543866828 0.0279303603 -0.022860771 0.0505837165 -0.0847905204 -0.00901029631 0.0521533526 -0.1608762 0.104811884 -0.131019562 -0.0860162452 -0.0113567347 -0.0383591801 -0.0563609265 4.78975235e-05 -0.0776869655 -0.0244138148 -0.0287256613 -0.00462406175 0.0273410846 0.0832010955 -0.163965613 -0.120046392 -0.0579920895 0.0763704479 -0.0490666665 -0.0393371582 -0.0308038332 0.0144903567 -0.0764346644 0.0913277194 -0.079852879 0.118332244 -0.0239615608 -0.00194155169 -0.0450538397 -0.0367242806 -0.0635610372 -0.0734204799 -0.0505309068 -0.0551075041 0.0487435013 0.0443839468 -0.0388969332 0.0745915398 0.0340284556 -0.16676791 -0.145779014 -0.0668597445 0.0446262956 0.118271135 -0.0663947687 0.0296456143 0.026162466 0.0529057756 0.0548181012 -0.0402599089 0.1588386 -0.0179137159 0.139155015 0.00738078915 -0.0524239726 0.0137822377 0.132933974 -0.0478267223 -0.0437835343 -0.0609143004 0.0495817997 0.0764281079 -0.108728923 0.00130954036 0.0162217524 -0.0413648672 -9.92733403e-05 -0.0946829468 0.0211789291 0.0359979048 0.0708673596 -0.0524883866 -0.120594747 -0.0314236432 0.0368239284 0.0714669451 0.00504412223 0.0477354079 -0.0145232538 0.0454902649 0.108366534 -0.0718016699 -0.0782018527 -0.0162136946 0.0636916757 0.0529595762 -0.102949098 -0.085303396 -0.0431678444 -0.0070376317 -0.0116431015 0.018388113 0.00934191514 -0.0280156322 -0.0174434576 0.0336461291 0.101552606 0.0250365101 0.025363965 -0.0484622717 -0.0553894639 0.0723912716 0.0153942443 0.0578710027 -0.0703106523 0.0352053531 -0.108632252 -0.0105771935 -0.122427978 0.0569752082 -0.136845201 0.0125352433 0.0202970356 0.0975599736 0.0372191109 -0.138877228 -0.027054349 0.0479225963 0.115527689 0.0146561814 0.00613815663 0.0266253352 -0.0195995755 -0.109447896 
-0.0266056508 0.0947076306 -0.100021295 -0.0946836174 0.0675920025 0.016405547 0.0379960537 -0.00477732671 -0.085013561 -0.13293992 -0.00561318314 -0.0741067305 0.0226107575 -0.0569941103 -0.0236728415 -0.11497964 0.0441488251 0.0320841633 0.0577440038 0.0333347246 -0.00757435709 -0.0460312143 0.0487401448 -0.0844789818 -0.168479323 0.0157266222 -0.0593237989 -0.031821873 -0.204284325 0.0635380894 0.0595039427 -0.0230322313 -0.0403341576 0.0534829907 -0.0867522731 0.143630907 -0.189208329 -0.073513478 -0.0599559881 0.0401786529 -0.12810
2
5
train
5
0
2
10
gradWeight
4
54
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
55
3
V 1
17
torch.CudaStorage
18432
0 0 0 0 1.04900623e-16 -2.64665838e-22 1.2396886e-19 1.92373861e-16 -3.32621112e-18 -7.02937886e-09 -9.26133725e-10 2.01645688e-12 -2.68508613e-22 -1.60196474e-11 9.49863105e-11 -2.62972109e-11 9.63104625e-16 2.01128936e-09 4.7488756e-11 8.14804938e-26 -9.35680866e-09 -3.83900984e-12 3.583133e-14 2.46529854e-14 1.98484736e-11 -7.25200493e-16 -2.37400455e-09 -1.13740679e-12 3.36089163e-15 1.53921959e-15 5.25429029e-17 -4.30130175e-14 -8.65743588e-10 3.17079129e-21 -5.62089624e-15 -7.90292989e-17 -2.78762315e-17 8.75167018e-13 9.98489892e-14 -6.17963059e-12 1.98829517e-28 2.20731677e-16 9.85242466e-15 2.47890882e-19 -1.05177704e-23 4.95914754e-10 -1.8468064e-13 -2.63553679e-10 -6.60023512e-12 -1.01885104e-14 -3.28982427e-18 1.69034058e-11 5.04854513e-13 3.30330164e-12 -2.57522432e-12 7.32463377e-16 -9.85116403e-24 -4.70128714e-10 7.32449447e-14 -1.38347982e-08 -1.1960764e-11 -9.42506928e-17 3.60507793e-13 2.08735243e-12 -5.18878725e-16 -7.21409999e-15 -9.81992916e-13 -1.70718849e-18 1.67767469e-18 1.39611842e-17 1.16149454e-20 -1.23049852e-11 2.12287343e-18 5.06921889e-17 -3.69988429e-10 4.83703992e-14 2.61667917e-15 7.94672679e-17 6.83254343e-13 -4.77914167e-11 -2.76688348e-19 -3.17243722e-14 -1.1559576e-20 9.98622066e-17 1.0057535e-13 5.85706261e-10 -5.16538035e-10 -1.96639498e-13 3.59942716e-21 -5.03236411e-11 -1.31904553e-17 -1.10281128e-13 2.20637886e-14 -3.49488071e-13 1.06336503e-16 8.47155428e-15 1.60434339e-11 2.3297147e-29 -4.55863385e-12 -1.35187538e-11 2.63738163e-11 7.15710408e-11 -2.73029273e-11 2.70070499e-09 1.29223086e-12 7.5510497e-15 5.5390267e-12 5.83474326e-15 7.00376579e-14 -9.69345914e-13 1.76304311e-13 -7.88008005e-13 2.47245731e-11 -2.02383908e-11 -1.19436985e-13 -5.09505988e-15 3.31511015e-27 -1.09049939e-11 1.92479496e-12 5.42073386e-10 -1.58557268e-18 -1.31102275e-18 -8.17432344e-09 5.47788846e-17 2.37134444e-16 -3.16131973e-12 2.48834665e-14 -3.41284021e-13 -3.85579191e-16 -1.72326077e-11 -6.6995411e-12 2.38174203e-10 -6.17104431e-14 1.72394195e-12 -4.93120152e-14 6.25728717e-14 -7.88420571e-14 4.24389189e-14 3.80788269e-14 7.77277323e-19 -4.26464867e-29 3.7891999e-17 -9.27762599e-16 9.07969556e-16 -5.65027534e-19 -2.61723253e-14 1.38609234e-19 1.14020247e-17 -4.39428962e-12 1.83355224e-23 7.41945131e-19 -1.20934662e-20 -1.74250989e-17 -5.96648344e-22 -4.69848845e-17 -1.25242004e-14 1.99787342e-21 2.51796163e-14 1.3990427e-27 5.07870355e-23 -3.61712049e-10 7.40266043e-11 -3.18201441e-19 2.30062714e-12 2.62299729e-08 -9.06750938e-14 -1.28426305e-14 1.13690059e-16 4.43622068e-20 1.22062953e-14 4.26284879e-20 -1.66154823e-13 -2.01580804e-28 1.2031339e-14 2.45822862e-10 -5.99157789e-12 -4.14121822e-19 -1.64290749e-16 -2.8158218e-16 -3.60337912e-14 -1.7123956e-18 -1.01813109e-16 -3.88324858e-11 5.55161589e-14 -4.7904897e-14 1.67200699e-13 4.58785995e-11 3.10598927e-19 -2.87137763e-16 4.02349319e-17 -5.46886928e-12 -2.73680601e-12 8.35140287e-20 1.98811589e-09 4.44193469e-20 -1.97184297e-10 1.26863125e-12 -2.2249707e-13 -6.53102464e-13 -2.19765916e-12 -1.31699274e-12 9.25458495e-12 -2.14371843e-19 -5.03303066e-10 1.21305493e-10 -1.02971817e-13 3.05593279e-13 -1.92992967e-24 -1.93710109e-12 6.08468397e-12 -2.09259589e-11 8.46751902e-15 1.10589246e-10 1.79079174e-09 4.39984941e-12 1.08425083e-13 -1.32813767e-14 2.02035349e-18 -5.71855544e-22 -6.99958127e-15 -3.54793929e-12 -5.33047456e-13 -9.34814388e-19 -6.41851481e-13 -1.42401119e-10 6.15604488e-15 7.17830934e-11 -7.91188087e-11 -1.44822054e-11 8.70891086e-17 -1.03779195e-13 -1.18005068e-11 -3.56931412e-17 
-4.85680329e-22 -5.61879321e-10 6.17172871e-13 5.51376957e-17 -4.9612136e-14 6.21542484e-10 1.4018203e-18 1.36080027e-14 1.9103484e-18 -6.74017574e-15 -5.14884229e-12 -2.4264902e-14 3.33061486e-12 4.89555844e-18 1.57275831e-10 2.29910292e-11 -1.62494966e-21 2.63376181e-13 3.07391486e-16 2.89572547e-21 3.38327055e-19 -2.88774171e-10 4.59502973e-14 -1.44834573e-27 5.58313284e-10 -1.17201881e-17 1.18919817e-14 6.61381192e-17 8.76206108e-10 1.79163102e-08 -2.07493356e-09 1.46355808e-14 7.61686933e-18 1.34857209e-10 6.62711752e-09 -8.07872103e-10 1.594
2
10
fgradInput
4
56
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
6
4
57
3
V 1
14
w2nn.LeakyReLU
3
58
5
2
5
train
5
0
2
8
negative
4
59
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
60
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
61
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
7
4
62
3
V 1
23
nn.SpatialConvolutionMM
3
63
17
2
4
padW
1
0
2
11
nInputPlane
1
64
2
6
output
4
64
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
65
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
66
3
V 1
16
torch.CudaTensor
1
64
1
1
4
67
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
64
2
4
bias
4
68
3
V 1
16
torch.CudaTensor
1
64
1
1
4
69
3
V 1
17
torch.CudaStorage
64
-0.00994083285 -0.00866007991 -0.00140617602 -0.000278606924 -0.00636992184 -0.0106512895 -0.00720125763 -0.0213922095 -0.00496625295 -0.0035911887 -0.0017489685 -0.00829330552 -0.00449281745 -0.00113383739 4.02228834e-05 -0.0423585027 -0.00221107737 -0.0212616995 -0.0104460809 -0.000343109627 -0.00382129708 -0.00224347855 -0.00608224515 -0.0249966532 -0.00974953361 -0.00272870902 -0.00500295591 -0.0103756003 -0.0547710955 -0.0012367192 -0.00359579525 -0.00467302045 0.000472412328 -0.00718466239 -0.00153053785 -0.00269557745 -0.00505454792 -0.00622239476 -0.0042318874 0.00373275368 -0.00420723995 -0.00534163835 -0.00333812181 0.00565104466 -0.00315149035 -0.00485922256 -0.00911627337 -0.00667737285 0.000465587043 -0.000245571689 -0.00744902669 0.000950172136 -0.0321917348 -0.00733052474 -0.0201809164 -0.00220747129 -0.00144803838 0.0194290709 0.000715592992 -0.00515107019 -0.00481034489 -0.0106487582 -0.00153078011 -0.0054039578
2
2
kH
1
3
2
6
finput
4
70
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
71
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
72
3
V 1
17
torch.CudaStorage
36864
-0.123086698 -0.00799167436 0.0450518094 -0.132717431 -0.0373789445 0.00589564862 -0.00671044458 0.0270834584 0.0254549161 -0.00436193822 -0.070896253 0.078674078 -0.0691017434 -0.108752184 -0.156234369 0.153834969 0.0257380288 0.00496366806 -0.113629974 -0.0754564628 -0.123663366 -0.060861975 -0.0162976272 0.129259735 0.11597205 0.000888039125 -0.00100015488 0.0934201553 -0.017527096 -0.0269929897 -0.031313967 -0.0427081361 0.040855106 -0.00271364558 0.0363107361 0.00478197634 0.0131636057 0.065889366 -0.0330982357 -0.0465552323 0.0153560927 0.0371419154 -0.027220929 -0.0733390898 0.0408278331 0.00316779804 0.00251459284 -0.0172798317 -0.0740490854 -0.0609934665 -0.0899415389 -0.0639671013 0.0756780878 -0.0310450047 -0.0197494626 0.0241648778 -0.0356245153 0.0276591219 -0.160796493 -0.00438188389 0.122817054 0.0802316666 -0.0367540903 -0.0387448892 0.0114075085 0.0764629617 -0.0435348824 -0.0144444164 -0.00563496118 0.00774843013 -0.0157377198 -0.0425041653 -0.0104259485 0.0466050804 -0.128274888 -0.0303923246 0.0886624008 -0.0721045285 -0.10544309 -0.0704498291 -0.0151715521 -0.0406625755 0.0109077999 -0.0787978247 0.0840082094 -0.0962501839 0.0788357407 0.078839764 0.0285302605 -0.0702689961 0.126766726 -0.0175395794 -0.0126750767 -0.0197090022 -0.00358046126 -0.0266787224 -0.202111408 -0.0953519568 -0.0930962339 -0.0539667904 -0.014771698 -0.0417811014 -0.102508612 0.0673950911 -0.0730439499 -0.0614083335 -0.0420006365 -0.016629789 0.0229485445 0.146992728 0.0372694992 0.00751154032 -0.0574376956 0.190688267 -0.0346623734 -0.0673718899 0.00438353186 0.071821779 -0.0593663938 0.017777564 -0.011282214 -0.167953983 0.0730929151 -0.137421578 -0.0228250921 -0.00120023242 0.0772970915 0.0147534069 -0.0053987205 0.0209948793 0.0326783545 0.0844723508 0.0749692395 -0.00151881226 -0.0925703272 -0.131024092 -0.0151029304 -0.0417806841 0.096126236 0.0219655316 0.0152821373 0.044343587 -0.0363094434 -0.0458188765 0.0334204286 0.0309670717 -0.00863068923 0.0157121364 -0.0355287567 -0.00616167113 -0.00806847587 0.0134486519 0.0596653037 -0.048990272 -0.0304196887 0.027170945 0.0222559441 -0.0185866188 -0.0373768993 0.0118347453 -0.0252187829 -0.0299396198 0.078874208 0.0661961064 -0.00508753909 0.0540955141 0.0135253621 0.0488985591 0.00511420425 -0.0764905959 -0.00510811945 -0.0606036745 -0.0385683216 0.0253902879 -0.0310700014 -0.0994199589 -0.0435050949 -0.027911352 0.023368435 0.00781418476 -0.035567075 0.0814287364 -0.0230482332 -0.103741936 -0.0367037281 0.0522966869 -0.0447841883 -0.16056338 -0.0282761641 -0.0683147982 0.00109156617 -0.0212666951 0.00976548344 0.00609040679 -0.0784789696 -0.00222448586 0.0261153653 -0.0574565344 0.0213281866 -0.0623761564 0.0929909274 -0.0141440285 -0.0577377453 0.0199714843 0.00513340486 -0.0783160925 0.0786013454 0.0481932685 -0.0441383384 -0.100625455 0.123390533 -0.0850662217 -0.045033671 0.0370807983 0.0554670244 0.100840166 -0.0838352889 0.0269586239 -0.0416804254 -0.0665621236 -0.0873552784 -0.0410219617 -0.0317602083 -0.0382405519 -0.0850638077 -0.050362967 0.0160035975 -0.0440186895 -0.0545338131 -0.0549778901 0.0953863785 -0.00556177413 -0.0510686561 0.0865007341 -0.0194597933 -0.101423219 -0.0236679148 -0.0305553861 0.0145303495 0.015187989 0.081179373 -0.0228258707 -0.02916421 0.0241146423 -0.00915718917 -0.0349448845 -0.0567291342 -0.0343610421 -0.0980491191 0.0733394846 -0.0418018214 -0.0788421705 -0.0333269536 -0.105705328 0.0232779905 0.00775632355 -0.0827139467 0.0781993493 -0.130866736 -0.144439995 -0.0154755721 -0.0154259531 -0.104672171 
-0.0539161116 0.0302697942 -0.0254487228 0.0149882985 0.0850916132 0.0503010973 0.0285841506 0.0190949477 -0.00499490416 -0.0237014946 -0.0415127277 -0.0149448682 -0.0369340628 -0.00344579015 -0.00946778245 -0.0715858638 0.00937117171 0.054883033 -0.0166852754 0.0316458046 -0.0441901535 -0.0254612267 0.029279381 -0.0446894653 -0.0202102792 0.0594358146 0.125393659 0.0311044846 -0.110081799 -0.0667117983 0.0440069251 0.119068705 -0.00859479513 0.0167548899 -0.0297772978 0.0424766541 0.0346628092 0.0158460233 0.0454825312 0.089223139 -0.04
2
5
train
5
0
2
10
gradWeight
4
73
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
74
3
V 1
17
torch.CudaStorage
36864
-5.56381399e-11 -1.68714855e-16 -8.55233094e-13 1.01593281e-13 3.93384799e-17 -7.63425534e-27 -3.33671944e-23 7.48170041e-17 -8.2291199e-15 3.22225536e-12 4.33833176e-16 -2.33184334e-19 -8.60262411e-17 -7.29866437e-15 -8.66258974e-12 1.23552241e-14 1.3576301e-13 3.84560772e-10 -8.75875944e-15 -4.08091367e-13 4.73738976e-17 -2.20377935e-16 -5.70767798e-14 -6.51474724e-14 3.6572728e-16 -1.10839007e-12 6.12540834e-12 -5.18771152e-16 7.30590629e-19 2.42884061e-19 -4.71468878e-18 8.72220134e-17 7.32384486e-10 2.22103282e-12 4.56732024e-14 5.13151465e-14 -3.10468936e-13 -3.05984459e-13 5.70458259e-14 2.04536354e-16 -4.76874728e-20 2.39186011e-14 2.36467833e-12 4.39981765e-29 -6.15627765e-18 -1.60695648e-13 9.58606329e-15 1.0584306e-11 -2.2500651e-13 6.00949441e-22 -4.44325739e-12 -3.30881535e-13 2.95857283e-10 -4.77626627e-22 6.51496305e-12 -3.82103908e-13 8.66620478e-16 -1.84134101e-19 1.50185772e-13 3.16401375e-16 -1.51100238e-15 7.10993144e-20 8.9432834e-13 -1.22788416e-15 1.60997539e-12 -7.57640796e-16 1.97317807e-15 -2.1836971e-15 -6.49615571e-15 3.21055463e-24 -1.61585224e-11 1.71821303e-12 -1.00480757e-14 7.95253922e-18 1.65456349e-23 1.68338282e-12 6.04816896e-14 1.43050638e-13 5.44184313e-14 -1.75815807e-12 7.19503537e-12 -6.46373717e-19 -7.14097611e-17 -1.42839057e-11 -8.58897338e-15 1.31611794e-16 -1.92653475e-21 -7.45727641e-18 -5.13671815e-17 -3.52496268e-17 1.64462187e-14 -3.62668446e-12 1.2058234e-14 -8.47311604e-19 8.04351702e-13 2.11539736e-14 4.79037432e-17 -1.10717997e-14 1.10906336e-11 -4.85218483e-16 -1.81220482e-17 -1.04953171e-10 -1.38706541e-14 3.92917903e-12 -9.57522127e-15 -1.92209219e-16 9.75540948e-17 1.28646102e-19 7.25793225e-15 -4.31333082e-18 -1.63099397e-16 -4.97603341e-21 1.81970285e-16 -1.69351556e-19 1.19279891e-17 1.30148742e-18 -1.9214998e-13 3.39526777e-19 -1.10967537e-13 3.31509756e-20 -2.53902226e-14 4.19245378e-19 3.13862765e-15 -5.37137134e-14 -1.56293688e-18 3.17896046e-13 1.54219733e-15 1.90629827e-17 -5.12536841e-15 -8.38441416e-21 2.09144666e-14 -6.40701532e-19 5.32524386e-17 -2.89474484e-15 3.64572721e-15 -6.28189495e-18 -3.52608932e-15 8.76946611e-18 3.75182437e-12 4.38037126e-14 -4.05673723e-19 -1.20286472e-11 -4.87923927e-15 4.80767748e-16 -6.01966554e-21 1.99686992e-17 -3.06441894e-12 -1.07270306e-15 1.56233533e-11 -1.01864911e-14 -6.09762501e-12 -8.24653795e-14 -4.36358116e-10 1.68732394e-23 4.18022262e-26 -1.20028719e-14 6.59982347e-18 7.45476577e-15 -1.23814506e-13 2.26768575e-14 -1.97679999e-15 2.53113468e-11 -2.19449736e-14 9.77630113e-14 -1.23818631e-17 -2.17129035e-16 -4.23068785e-12 -3.34745237e-17 -3.60713046e-14 -5.23041964e-14 -7.95687132e-20 -1.47050059e-12 -5.71641606e-12 -6.92392514e-14 4.02826172e-17 -3.10245881e-16 1.10479214e-28 -5.02659806e-21 4.90117034e-21 -2.83748913e-23 -1.4923802e-16 8.06809966e-18 5.94075558e-20 4.51309533e-23 -7.21174829e-14 1.79778553e-10 6.37490655e-16 -1.21429969e-18 4.99226825e-22 -2.27931389e-11 3.45889254e-18 -2.59330101e-12 -8.50445714e-20 2.33053899e-14 -2.96711533e-15 2.44305181e-15 -5.83295342e-17 -3.068195e-11 -1.16211379e-22 4.03019197e-19 4.59527351e-17 6.52322324e-18 -8.93642126e-17 -1.13166446e-21 5.2389762e-17 -8.9715996e-17 -2.09096672e-12 -1.79172887e-12 3.67746046e-15 -7.83484931e-13 -1.46478963e-17 4.93587376e-12 2.23466672e-13 -7.99895496e-14 -1.05281561e-20 1.82940333e-13 -6.3397299e-14 -1.98539678e-14 -1.035666e-14 1.02564692e-16 -3.49251801e-15 -6.96164944e-15 -3.59709366e-29 1.82633466e-12 4.99440373e-14 1.0129148e-12 -2.61715129e-11 -4.2408301e-18 2.62746457e-11 3.42374864e-19 
5.92716493e-16 1.00420042e-14 2.3465208e-14 5.18141899e-17 -1.10073676e-14 1.85649902e-16 -5.50507358e-21 9.74036432e-16 2.54907552e-14 -3.44939543e-17 -1.10194861e-12 -3.61278134e-16 2.96320364e-25 -4.86099767e-13 2.62227136e-14 -7.28118945e-16 -9.69766073e-22 -1.33836576e-22 1.60339711e-16 -4.13080473e-11 3.13280213e-24 2.35704642e-12 7.73790571e-18 -1.45223885e-11 2.51682678e-19 -6.073767e-19 -2.70822557e-12 -9.95042976e-15 1.50057101e-16 8.32028782e-13 -3.62418483e-13 4.33400332e-13 8.85154004e-15 -2.42198607e-18 -5.54085111e-10 4.
2
10
fgradInput
4
75
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
8
4
76
3
V 1
14
w2nn.LeakyReLU
3
77
5
2
5
train
5
0
2
8
negative
4
78
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
79
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
80
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
9
4
81
3
V 1
23
nn.SpatialConvolutionMM
3
82
17
2
4
padW
1
0
2
11
nInputPlane
1
64
2
6
output
4
83
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
84
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
85
3
V 1
16
torch.CudaTensor
1
128
1
1
4
86
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
128
2
4
bias
4
87
3
V 1
16
torch.CudaTensor
1
128
1
1
4
88
3
V 1
17
torch.CudaStorage
128
0.00176442775 -0.0247833692 -0.00319119287 -0.00236230297 -0.00165792787 -0.00451174146 -0.0139774485 -0.0041514826 -0.00464342441 0.00129777344 -0.00193651638 -0.0022980608 -0.00178955228 -0.0035620071 -0.00309439306 -0.00526283914 -0.00523072248 -0.0033240749 -0.0061455504 -0.00282934727 -0.00258207205 -0.000936019293 -0.00124301785 -0.00670187594 2.38699267e-05 -0.000260756235 -0.00181689952 -0.00291803665 -0.00584822102 -0.00350338803 -0.00238338229 -0.00319803017 -0.00166887639 -0.0027798363 -0.00376898609 -0.00327047985 -0.00513926335 -0.00334327109 -0.00368343247 -0.00296321441 0.0103204222 -0.00587829016 -0.00117257203 -0.00254768506 -0.00229110382 -0.00493816426 -0.00683936384 -0.00180563412 -0.00468804454 -0.00290533481 -0.00200566323 -0.0043910956 -0.00134562945 -0.000995952054 -0.00178203615 -0.00369802816 0.000444132223 -0.0039681904 -0.00320997601 -0.000312022807 -0.00530572282 -0.00294841896 -0.00397414621 -0.00523273041 -0.00353114889 -0.00328740571 -0.00489110965 -0.00361847552 -0.00258239405 -0.0041696569 -0.00366433873 -0.00513112452 -0.000751825282 -0.00445938623 -0.00184308086 -0.000617297075 -0.00863341708 -0.00261991238 -0.00102429173 -0.00142355042 -0.00431333203 -0.00434967782 -0.000374521798 -0.00953623094 0.00262169982 -0.00407811301 -0.00115267793 -0.00160277344 -0.00260490039 -0.00268476759 -0.00473463582 -0.00207012962 -0.0046644262 -0.00532316184 -0.00769242505 -0.00146937638 -0.00257701823 -0.00516091939 -0.00272258022 -0.00324270409 -0.00411526999 -0.0042573195 -0.00331943226 -0.00438925251 -0.00299480208 -0.00477332855 -0.00118363579 -0.00337735261 -0.00564004201 -0.00628838036 -0.00534396293 -0.00322056352 -0.00375467283 -0.00306952326 -0.00397758512 -0.00504781678 -0.00248931884 -0.00579822063 -0.00561810704 -0.000825988071 -0.00100271148 -0.0024904774 -0.00452332944 -0.00100195105 -0.00267283199 -0.00328957662 -0.00327378698 -0.00165556045
2
2
kH
1
3
2
6
finput
4
89
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
90
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
91
3
V 1
17
torch.CudaStorage
73728
0.0071215732 0.0239538234 0.0401674211 -0.00991264731 -0.0180334151 0.0997132212 0.0190507099 -0.0193304606 0.0682813153 0.0333567709 -0.011158824 0.00643295422 -0.0296610035 -0.019845495 0.0194225628 -0.00851271208 0.0428271107 -0.0871597826 0.0279362388 -0.00191837724 -0.0987980664 0.00040372592 0.0142299663 -0.00277320854 -0.0333833694 -0.0146735907 0.00714197103 0.0481915027 0.0949747413 0.0187915843 -0.014496237 0.00660727546 0.0483785868 -0.00596819585 0.088907674 0.125871867 -0.0149405282 0.0200897623 -0.021732986 0.0240795072 -0.0591405854 -0.0391034298 -0.0772235766 0.0341672711 -0.0505084805 -0.0806960762 0.00956192985 0.05807418 -0.055279918 -0.0306815896 0.076951161 0.0103240637 -0.0402911194 0.054674156 -0.0808827877 0.0237124171 0.0317389555 -0.0228409134 0.0487687811 -0.0201005768 0.0187764298 -0.0279581565 0.0111144483 -0.0462967604 0.0446058251 -0.016757505 -0.0245854035 -0.0398827344 0.010309753 0.0315563828 0.0466361791 -0.0168568082 -0.0666838512 -0.0512105711 -0.032591112 -0.0393337011 0.0230133384 -0.0206212942 0.0144707849 0.014794521 -0.00735080102 -0.0946343318 -0.0293072052 -0.0198525786 -0.0908672512 0.100950047 0.0278729945 -0.0477057882 -0.0151089467 -0.0722381696 0.0776312873 0.0293891244 -0.00388169102 0.0230098236 0.0430329144 -0.046198111 0.0180721618 0.0177969746 0.0131668374 0.0248134211 -0.0117758363 -0.0170749761 0.0111543341 0.0299093314 -0.0678629354 0.0356950574 -0.0107027916 -0.0249004774 -0.125352204 -0.0374091454 -0.0535587892 -0.0531365536 -0.0154403057 -0.114775859 -0.020651225 -0.0158429369 0.0394631065 -0.0512137525 0.00754058361 0.00222101342 -0.0357741453 0.0127738109 0.0337989926 -0.0649947748 -0.00762665877 -0.0612696745 0.0245407559 0.0392311998 -0.0321323052 -0.0194186699 0.0203879569 -0.0325292163 0.00635038689 0.0282745752 -0.0795635805 -0.0160304848 0.0122876177 0.0714289173 0.0245948192 0.000937077857 0.0573974662 -0.00335979043 -0.00938066095 0.0402317755 -0.0412910134 0.0212825686 0.00468308246 -0.0121439267 -0.0756207928 0.0200240053 0.00429759454 0.0411542915 -0.00726000406 -0.0120847924 0.0290222671 0.0129178613 0.0222994462 -0.00266836747 0.0430140123 0.00727527682 -0.0145296864 0.00313200708 -0.0976430029 -0.00909055863 0.0175417997 0.0233522747 0.0148620196 -0.0757904127 0.000400925113 0.0113805793 -0.0285453144 0.010826393 -0.0215216167 -0.0233575888 -0.00662972592 -0.0240315683 -0.0299265329 -0.00396807818 -0.0280331299 0.0584150292 -0.0348567031 -0.0328928344 0.0224715155 -0.0271075387 -0.0676439032 0.0391325131 -0.0342605114 -0.0486776233 0.052912347 0.0334697329 0.00556105422 0.0417792872 -0.00743500609 -0.0254132748 0.0739508793 0.0169742983 0.0344588906 0.0403918326 0.00826324616 0.049639903 0.0503447279 0.0438185856 -0.0623561069 -0.0088064773 0.108076863 -0.00306636235 0.0789362192 -0.00449978234 -0.0497183241 0.0302636251 -0.0926390141 0.00526912324 -0.0155764874 -0.00625658594 0.0164348073 0.0370475128 0.00476147374 -0.0730928183 -0.0926733464 0.0282644983 0.028734114 -0.122052275 0.0825596228 -0.0241573751 -0.0783458129 -0.0639034063 -0.015198905 -0.0522371419 -0.108863175 -0.00774472021 -0.0302909855 -0.0328189544 0.0898846686 -0.00987209566 0.0901687592 0.0227560792 0.00729011139 -0.00685594743 0.00979112647 -0.0201364346 0.00315717212 0.00108376425 0.0379832424 -0.0579843111 -0.0668634549 -0.0250309836 0.0186270345 -0.0193707943 -0.026245648 0.0291291606 -0.00280497293 0.0136371832 -0.0434783213 0.049515564 0.0456482843 -0.0393111855 -0.0194336008 0.114716306 -0.0153742498 0.0351123959 0.10061302 0.0909367427 
-0.012048373 0.0102521116 0.0118519003 -0.0376734845 0.0214885063 0.0558207817 0.0593498014 0.0424326025 -0.0189164765 -0.0079634143 0.00977182388 -0.016159324 -0.0196719468 -1.73011904e-05 0.0309673753 -0.0735837668 0.0799907222 0.0192821976 0.0158821344 -0.0363094546 0.0286242273 -0.0128960256 0.024583621 0.0236434005 0.00606334209 -0.0589543357 0.000772772008 0.0906461403 0.033207152 0.0418533385 -0.00776999025 0.0264228359 0.0343448669 0.0250273962 0.0212225337 -0.0670018792 -0.0348484032 -0.016043108 -0.0457272269 -0.0303594489 0.0
2
5
train
5
0
2
10
gradWeight
4
92
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
93
3
V 1
17
torch.CudaStorage
73728
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
94
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
10
4
95
3
V 1
14
w2nn.LeakyReLU
3
96
5
2
5
train
5
0
2
8
negative
4
97
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
98
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
99
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
11
4
100
3
V 1
23
nn.SpatialConvolutionMM
3
101
17
2
4
padW
1
0
2
11
nInputPlane
1
128
2
6
output
4
102
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
103
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
104
3
V 1
16
torch.CudaTensor
1
128
1
1
4
105
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186
2
2
dW
1
1
2
12
nOutputPlane
1
128
2
4
bias
4
106
3
V 1
16
torch.CudaTensor
1
128
1
1
4
107
3
V 1
17
torch.CudaStorage
128
-0.0233762767 -0.0163145363 -0.00850441959 -0.0198656898 0.00438947324 -0.0114613064 0.00404496538 0.012809339 -0.0173027627 -0.00143417157 -0.00741974264 0.0165997893 -0.0236613881 -0.00895354338 -0.0172360055 -0.00313056353 0.0126926005 -0.0203433204 -0.00733395852 -0.0170415733 -0.0239583012 -0.0207338501 0.0162131954 0.0147002311 -0.00134657626 -0.0148131065 -0.0140713667 -0.00415334292 -0.00676552625 0.0128855826 -0.0269325171 -0.0200835243 -0.000934978947 -0.0142893596 -0.0203535836 -0.021563407 -0.0213411078 -0.0174087379 -0.0195610803 -0.00399893289 -0.00172087166 -4.18069103e-05 0.0129658552 -0.0230993722 -0.00209603342 -0.00892590079 -0.0201012623 -0.00619624741 -0.0165243 -0.00426845532 -0.0134875011 -0.00232241163 -0.00222666841 -0.00926363654 -0.0200868566 -0.0127579151 -0.00276238867 -0.019809315 -0.00615866203 -0.0216793697 -0.0177740715 -0.0125512537 -0.00677247066 -0.0215030462 0.0170415733 -0.0205033049 -0.00179198547 -0.0229064282 -0.0176045597 -0.00493198773 -0.00157620292 -0.00248197257 -0.0216286965 -0.00475577172 -0.00701354165 0.0195216462 0.0191345122 -0.0198941175 -0.0175988432 -0.000298684608 -0.00429408159 -0.0202089828 0.00701996405 0.00856151432 0.000123288206 -0.00342660025 -0.00504735112 -0.020480413 -0.0195619669 -0.00862235017 -0.00393751869 -0.014851436 0.00912931748 -0.00477441307 -0.00401287898 -0.0214798376 -0.0200906638 -0.0173691977 0.00202029408 -0.00987205748 -0.0116455741 -0.0179576725 -0.00581091922 -0.00629882282 0.00486659957 -0.0207890589 -0.022852229 -0.00208834745 -0.00161722617 -0.002722156 0.00227727392 0.00459853001 -0.0238885768 -0.0339713469 0.0185853429 -0.0226250272 -0.0202792529 0.0056343046 -0.00364561216 -0.0270846616 0.017423356 -0.00311476435 -0.0098374607 0.00127808948 -0.000409438944 0.0128426692 0.00408197008 -0.0129718278
2
2
kH
1
3
2
6
finput
4
108
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
109
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
110
3
V 1
17
torch.CudaStorage
147456
-0.0140840476 0.000815596664 0.00736520765 0.027001515 0.0679475591 -0.00152540684 -0.0426926054 -0.0105626667 -0.0345696472 0.0120960632 -0.0515445769 -0.00762418285 0.0526419766 -0.0633652881 -0.0049779457 -0.0143599147 -0.0497751273 -0.0258430429 -0.0461941622 -0.0444523394 0.00640560361 -0.052315481 0.0228900686 -0.00933015533 0.0305835512 -0.00474111224 0.0488959663 -0.040573474 0.0329749398 0.0139183328 -0.0766572654 -0.0545016378 -0.0525961779 -0.0287802797 0.0116806235 0.0128792142 -0.0207877904 -0.0424753577 -0.000690940127 -0.0480621085 -0.00801455695 -0.0148049593 0.0284303054 -0.0316083133 0.0204519629 0.0166178495 -0.071864821 -0.0928750187 -0.023839362 -0.0693325251 0.00891482737 0.0107332692 -0.0551499724 0.0637483522 -0.00568146352 -0.0881387964 0.0248362981 -0.0464411788 -0.0330175273 0.0140563101 -0.0918263867 0.00695062988 -0.0198657792 -0.00338183087 0.00684990268 -0.0440988317 -0.00527615938 0.0299953297 -0.0718073696 -0.067883037 0.0017351862 -0.00666983938 -0.0492134355 -0.0212279968 0.0352815278 0.00354124606 -0.0858179405 -0.017507948 0.060559988 -0.0767516494 0.00288289413 -0.0426256247 -0.0504361726 -0.068982996 0.0128999706 0.0250101332 0.0422711521 -0.0196711626 -0.0389805771 -0.0423543379 -0.101848587 -0.0516619347 0.101814047 0.015503102 -0.0139289945 -0.000734607573 -0.0206858888 0.0334502831 -0.00794458855 0.0039174445 0.067960836 -0.0104950555 -0.0129374294 -0.0395345241 -0.00903383829 -0.0564632341 -0.0460081547 -0.0715264305 -0.0592664629 -0.0510870405 -0.0289195869 -0.0264850277 0.049083773 0.00516729662 -0.0165935867 -0.00919703022 -0.0131829614 -0.0129299061 0.051785022 0.0166668706 -0.0306743458 -0.036050342 -0.0629805028 -0.0236403439 -0.00878582709 0.0375803821 0.00723510049 0.0523622148 -0.021382438 0.0038959668 0.0266477372 -0.0588542745 -0.0677319393 -0.093980886 -0.0680450797 0.0438386984 0.00645028334 -0.0725912824 -0.0160357933 -0.0358068794 0.0413888879 0.0180257931 0.0276067667 0.056621477 -0.11550393 0.0413449705 0.0329777487 -0.0161376875 0.0311571751 -0.0298367143 0.00773478812 -0.0141062913 0.00872194581 0.0400806554 0.00811479241 -0.0482382923 0.0375513919 -0.0159338582 0.00503458362 -0.0644658953 -0.0300942827 0.0222273711 0.0212256126 -0.0500049032 -0.0504578203 -0.0126308575 0.0257778838 -0.0413755774 0.0541437827 -0.00287965708 0.0719299167 -0.0686761886 -0.0277996659 -0.0437180139 -0.0190081187 -0.0176053103 -0.0240075458 0.00307803717 -0.025721563 -0.0619694702 0.0186835323 -0.0850578174 -0.0213495772 0.00316315936 -0.0183482151 0.0635774955 0.0684968382 0.087905407 -0.0105576543 -0.0653298795 0.0314791985 0.0214031897 -0.00427500252 -0.0290123988 0.0682449639 -0.00545103103 -0.0263270158 -0.0112097124 -0.0757516101 -0.0301900879 -0.0144633334 0.0372047573 -0.0357056931 -0.0259097517 -0.0252466146 0.00866512582 0.0521499068 -0.0489207879 -0.0478255004 0.0217150152 -0.00678784773 -0.0243727081 0.00210038782 -0.0361031406 -0.0120313587 0.0536973812 -0.0212312937 0.00931724999 0.0342612565 -0.0358005427 9.41051039e-05 -0.00955303945 0.049938634 -0.0270495228 -0.0811559334 -0.0643090084 -0.0433162749 -0.0126999402 0.087773934 0.0558940172 0.0277275201 0.00913742185 0.016644014 -2.87849216e-05 0.0388498642 0.0301712696 -0.0554809012 -0.00437483937 0.0373397283 0.0313286632 -0.0197947621 -0.075108394 0.0596602745 0.00843294337 0.0535075553 0.00473073311 -0.0173858851 0.0176291242 0.0518112406 0.0159965456 0.010614844 -0.0233934745 -0.0712998435 -0.0415247977 -0.0361117795 -0.0431206934 -0.0865798146 -0.0201869849 -0.0458733812 
0.0133423079 0.0504898801 -0.0498477444 0.0263807662 -0.0985195413 -0.0233407114 0.00442402577 0.0339165926 -0.0605053827 -0.00476336665 -0.018301487 -0.0284097847 -0.0248662978 0.00640913239 0.0119155385 0.0172137897 -0.0380209684 0.0457815491 0.0533571802 0.0236209501 0.0101116961 0.0469210781 -0.0144924996 -0.0704370812 0.0117767099 0.0114135845 0.022056045 -0.0378073379 -0.022284219 -0.00522661675 -0.0165724847 0.00390864303 -0.0528328195 -0.0785012245 0.00723012863 0.0144499689 0.0293390173 0.0618879423 0.0558521561 -0.0202685595 0.053829
2
5
train
5
0
2
10
gradWeight
4
111
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
112
3
V 1
17
torch.CudaStorage
147456
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
113
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
12
4
114
3
V 1
14
w2nn.LeakyReLU
3
115
5
2
5
train
5
0
2
8
negative
4
116
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
117
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
118
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
13
4
119
3
V 1
23
nn.SpatialConvolutionMM
3
120
17
2
4
padW
1
0
2
11
nInputPlane
1
128
2
6
output
4
121
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
122
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
123
3
V 1
16
torch.CudaTensor
1
1
1
1
4
124
3
V 1
17
torch.CudaStorage
1
0
2
2
dW
1
1
2
12
nOutputPlane
1
1
2
4
bias
4
125
3
V 1
16
torch.CudaTensor
1
1
1
1
4
126
3
V 1
17
torch.CudaStorage
1
0.0186684243
2
2
kH
1
3
2
6
finput
4
127
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
128
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
129
3
V 1
17
torch.CudaStorage
1152
-0.963831186 -0.239543021 -0.392372191 0.0169094969 -0.0825418308 0.80127275 0.277926862 0.584152281 -0.654202342 -0.606416702 0.966531515 -0.0502211601 0.528424382 -0.170983225 -0.988026917 -0.279274702 -0.169242784 0.630166113 -0.061001651 0.0524165705 0.0901853517 -0.0703493431 -0.457276344 0.482367277 -0.50578326 0.83537811 -0.388984889 -0.142590895 0.112026274 0.125932127 -0.56356293 -0.177220657 -1.16467607 -0.185868576 0.704931676 0.474912584 -0.436037779 -0.578921318 0.297802508 0.903966784 -0.213216797 -0.614824414 -0.294140339 1.0109489 -0.0882193446 -0.432685584 0.446976006 -0.233470395 -0.0175226741 0.264743328 -0.0319866762 0.175039217 -0.321988732 0.139795184 0.385610372 0.46362403 -0.70805639 -0.0702007636 -0.712521434 -0.297490627 0.230235234 0.534135401 0.173290476 0.640482783 -0.361955851 -0.138622925 0.233216226 0.503644824 0.475132227 0.63255322 0.456926197 0.854619384 -0.137017578 0.448481053 -0.327210188 0.0946894586 -0.221160337 -0.987258315 -0.279421896 -0.591287553 -0.129476905 -0.0346057452 0.312118232 0.0229848865 0.055094298 -0.16104205 -0.166836739 0.0166274551 0.312928289 -0.319935888 -0.463120848 0.0136599066 -0.192210451 0.849716425 0.136264205 -0.858602703 0.0802066848 0.247466952 0.185218826 -0.813678205 0.0273954663 -0.0625172183 -0.218663737 0.287598759 0.730290949 0.403997332 0.358145297 0.167190045 -0.432581961 0.68693608 0.0503320172 -0.146386534 -0.37652415 0.738424778 -0.358554453 -0.465571582 -0.462264538 -0.0403943621 1.1470139 -0.488420069 -0.239532486 0.3405191 -0.295519829 -0.0389212519 0.258525342 0.412889928 -0.848228931 0.0406404622 -1.07155573 0.0884323195 0.0755794048 0.115181603 0.0696584657 0.31592375 0.275427341 0.255664676 -0.435464948 -0.0538140051 0.254223078 0.541926146 0.137931809 -0.838124812 0.63604331 -0.600673497 0.87717396 0.327245891 -0.496297896 -1.41616166 0.857842445 -0.113131456 -0.253604054 0.433080524 0.403664291 -0.0609152131 0.964936912 -0.296705216 0.595216632 0.133865565 -0.719004154 -0.395556599 -0.454242915 -0.733320534 -0.151240915 0.00249768212 0.107026704 0.258539468 -0.151040033 0.502023697 -0.182118937 -0.0154995443 -0.388426483 1.07662261 -0.673247397 0.071785897 0.325375259 -0.527948678 0.473130345 -0.628595829 -0.519084454 0.273315579 -0.573245466 0.339427739 -0.262865692 -0.226882771 -0.119405702 -0.618882954 0.103889033 0.406603724 0.22169973 0.353638172 -0.566979289 0.170556858 0.155539274 -0.0389143676 -0.56736201 -0.644875884 0.038403105 -0.796278656 -0.765754521 -0.0750262067 0.220077261 0.400575489 -0.0319772586 0.321452349 -0.217612848 0.482694864 0.395932972 0.269808114 -0.122475192 -0.412174225 -0.583616018 -0.117460765 0.156716272 0.741551459 0.584565938 0.111814491 -0.155880943 -0.147130907 0.190350622 0.296389312 -0.261672527 0.148112461 -0.13719362 0.257077992 -0.218246922 -0.313012958 -0.297004819 0.252085567 0.0247449372 0.166506022 0.857164681 -0.372256815 0.235705838 -0.553722918 -0.101398349 0.162480056 -0.525835276 0.552634835 -0.291392624 0.239562869 0.089638263 -0.45046708 0.275012016 0.703736246 0.183334365 -0.550596297 0.442782044 -0.569128394 -0.167915761 0.155499145 -0.0759052858 -0.113326609 0.0732875764 -0.0355549306 0.176219583 0.374874502 -0.372993559 -0.253400624 0.182430014 0.0592905693 -0.206759244 -0.0691197217 0.131935075 0.00696112681 -0.367399246 -0.232469589 0.466585875 -0.162109777 0.115579836 0.643571615 -0.214837983 -0.691242337 -0.312438846 0.496434003 -0.241094291 0.451972604 -0.684773207 0.485243648 0.564350128 0.407692522 -0.106020607 -0.0442059971 -0.182433009 
-0.517948747 -0.0542827882 -0.961716831 -0.183227256 -0.101574965 0.14500244 0.365451694 1.19151461 -0.680979073 -0.165965438 0.148734957 0.0729711205 0.281161726 -0.783484578 0.0552340671 0.0297651794 0.625767648 -0.238705114 -0.5209921 -0.236991256 -0.0184604581 -0.153505862 0.0660603344 0.139852792 -1.53137326 -0.263466567 -0.0693236887 0.448065311 -0.222175509 0.583903491 -0.130363449 -0.581843495 -0.484847426 0.228958338 0.383707702 0.476502478 0.0751588494 -0.627201855 -0.359865695 -0.0238717888 -0.464656919 -0.217509851 -0.71478
2
5
train
5
0
2
10
gradWeight
4
130
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
131
3
V 1
17
torch.CudaStorage
1152
0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 
0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781250186 0.00781
2
10
fgradInput
4
132
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
14
4
133
3
V 1
7
nn.View
3
134
4
2
12
numInputDims
1
3
2
4
size
4
135
3
V 1
17
torch.LongStorage
1
-1
2
5
train
5
0
2
11
numElements
1
1
2
5
train
5
0