waifu2x/models/anime_style_art/scale2.0x_model.t7
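This is the Torch7 serialization of the waifu2x 2x-upscale model for anime-style art: an nn.Sequential chain of nn.SpatialConvolutionMM layers (3x3 kernels, stride 1, no padding, with nOutputPlane stepping through 32, 32, 64, 64, 128, 128 in the portion shown) interleaved with w2nn.LeakyReLU activations (negative_scale 0.1), all tensors stored as torch.CudaTensor/torch.CudaStorage. Below is a minimal Lua sketch, not part of the file itself, of how such a model could be loaded and inspected; it assumes Torch7 with cutorch/cunn installed, the waifu2x lib directory on package.path so that require 'w2nn' resolves, and it passes 'ascii' on the assumption that the readable dump that follows is Torch's ASCII serialization format.

-- Minimal loading sketch under the assumptions stated above.
require 'torch'
require 'nn'
require 'cunn'   -- CUDA backend; the stored tensors are torch.CudaTensor
require 'w2nn'   -- waifu2x helper module defining w2nn.LeakyReLU

-- 'ascii' is assumed from the readable dump; omit it for binary .t7 files.
local model = torch.load('waifu2x/models/anime_style_art/scale2.0x_model.t7', 'ascii')
print(model)       -- prints the nn.Sequential layer structure listed below
model:evaluate()   -- switch off training-only behaviour before inference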


4
1
3
V 1
13
nn.Sequential
3
2
3
2
9
gradInput
4
3
3
V 1
16
torch.CudaTensor
0
1
0
2
7
modules
3
4
14
1
1
4
5
3
V 1
23
nn.SpatialConvolutionMM
3
6
17
2
4
padW
1
0
2
11
nInputPlane
1
1
2
6
output
4
7
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
8
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
9
3
V 1
16
torch.CudaTensor
1
32
1
1
4
10
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
32
2
4
bias
4
11
3
V 1
16
torch.CudaTensor
1
32
1
1
4
12
3
V 1
17
torch.CudaStorage
32
-0.0041856044 0.00449605146 -0.00511856284 -0.0409714431 0.0140047427 0.00837770291 0.00468924595 0.00509926351 0.000402718841 -0.133709729 -0.00818221178 -0.00475453911 0.00488521857 0.0319522917 -0.00832067244 -0.00502372626 0.00249423017 -0.0161819458 -0.0968043357 -7.73325792e-06 -0.00173107465 0.000221900191 0.00149381883 -0.0277224034 0.00182603975 0.00497381575 0.00390738575 0.0327064544 -0.000618327002 0.00409557857 -0.00140493643 -0.00104531518
2
2
kH
1
3
2
6
finput
4
13
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
14
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
15
3
V 1
17
torch.CudaStorage
288
0.0501390025 0.0859109387 0.192231342 -0.0082904119 -0.0651069731 0.0416583605 -0.0312922224 -0.228285104 -0.0382434539 0.0627975613 -0.0812152252 -0.0257898849 0.239179716 -0.0036726119 -0.218260646 0.0362724662 0.0291189551 -0.0374670178 0.079236038 -0.191149488 -0.0454185456 -0.111782089 0.0542540289 0.0660647303 0.131675929 -0.0952108875 0.107346497 -0.0966027454 0.0522311814 0.0922833607 -0.0936966091 -0.104282133 0.197558165 0.0134681985 0.167941034 0.141892239 -0.0595209636 0.116022147 -0.00318002491 0.0179651994 0.0197204761 -0.17445755 0.0446809269 -0.064922221 -0.0197479669 0.0405128486 -0.0263799932 0.0483097807 0.131051481 0.0931494683 0.026129052 -0.0107609779 -0.0962676108 -0.200322896 -0.123736732 -0.0809580982 0.155686945 0.0269938521 -0.0304880552 0.0442673676 -0.10150604 0.0558914803 0.0535391048 -0.0324191265 0.0261670835 -0.0574153513 0.0051597734 0.0107304119 -0.000608393282 -0.0484527424 0.0594362803 0.0019389427 0.102813713 -0.0200765207 -0.0606881417 0.0595417283 -0.015866315 0.065481104 0.0062121097 -0.127900973 -0.00420388859 0.027849678 -0.0420292392 0.00604736246 0.137055099 0.0401882604 -0.0150436638 0.0110903224 -0.0183431953 -0.00402118824 -0.0167821106 0.173845008 0.0330869146 -0.0993408933 0.20569253 -0.128574148 0.0414213873 -0.102767892 -0.124448545 -0.0353359096 -0.0293803867 0.0274499301 -0.0954653248 0.0790360868 0.0754103512 0.05638941 -0.226360336 0.13381803 -0.028958289 -0.107489452 0.279084772 0.070844315 0.064961642 0.03567883 -0.0397491455 -0.209822178 -0.0647572875 0.0296762064 -0.0861755386 -0.203374147 -0.00990181137 -0.0832363367 0.082306996 -0.00279969652 -0.0831711516 0.078374967 -0.0296966489 0.063838318 0.0118313115 -0.0254799668 0.014592438 -0.0424077995 -0.00541763566 -0.0508844666 0.0505562127 -0.0402809493 -0.0217597671 0.00204626494 0.0850130245 -0.0155017553 -0.00485858414 0.0506324805 0.00944436993 -0.0914864093 0.0407985784 0.00449568452 0.0220941976 0.0320161581 -0.118229903 -0.11827907 -0.0162113849 0.058757104 -0.0176638644 0.0360478759 0.0466365181 0.0340380296 0.0530561395 0.0244238898 0.0349891782 -0.147274569 -0.100476392 0.0241987221 0.0111121796 -0.0197022334 -0.0226413403 -0.0339717753 -0.0303968452 0.0346747302 -0.00781666581 0.0844705105 0.0867047012 0.223274291 0.064191699 -0.132711783 0.0433277972 -0.0768990368 -0.0522122309 -0.0371659547 0.113410726 -0.143558845 -0.00221093581 -0.00520955864 0.000487533835 -0.0791257471 0.0333868526 0.0413701572 0.0765097737 -0.0542562418 -0.0163680203 0.0310082361 -0.123244375 0.101751216 -0.0155034596 0.0824330077 -0.0250270162 -0.00266128825 -0.00594707904 -0.0556391142 -0.17864427 -0.0999630317 -0.0331025086 0.118453212 0.0554118864 0.130801678 0.169981435 -0.103224948 -0.0573646016 0.158731744 -0.142683506 0.0493406542 0.0390134715 0.0244039875 -0.0319362022 0.06995745 0.0409982949 0.0207351446 0.0492577478 0.127420664 0.0634153113 -0.0162214469 0.117760144 0.00612258865 -0.128459528 -0.141396195 -0.0766678378 0.106620215 -0.0647677556 -0.0656903237 -0.0714770257 -0.0741125718 0.0637278035 -0.00255951146 -0.0883994624 0.00886486284 0.166733027 -0.0607282668 -0.121182837 0.0783930495 0.00335127534 0.129798949 -0.229393438 -0.0404069908 0.0752980039 0.00256814738 -0.0110759754 -0.161897048 -0.131597534 0.0257955976 -0.0225871112 0.0413266905 -0.0309119634 -0.0527125485 -0.159609526 0.0196624305 -0.143715844 -0.0613779537 0.0833381489 -0.00290112523 0.185886994 -0.0181967765 0.0963298529 -0.0848667473 0.119019441 0.153670967 -0.0876335874 -0.0795347169 -0.124361433 0.143306881 
0.0372560099 -0.0754579902 -0.0363407359 0.0455767848 0.0400739647 -0.0613305345 -0.00731785363 0.0393307582 -0.136827797 0.0422729738 0.0804485008 0.0160814673 -0.0502084196 0.00493516261 0.0766872764 0.0312092155 -0.123582423 -0.0826476216 0.0185637716 0.0934238955
2
5
train
5
0
2
10
gradWeight
4
16
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
17
3
V 1
17
torch.CudaStorage
288
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
18
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
2
4
19
3
V 1
14
w2nn.LeakyReLU
3
20
5
2
5
train
5
0
2
8
negative
4
21
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
22
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
23
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
3
4
24
3
V 1
23
nn.SpatialConvolutionMM
3
25
17
2
4
padW
1
0
2
11
nInputPlane
1
32
2
6
output
4
26
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
27
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
28
3
V 1
16
torch.CudaTensor
1
32
1
1
4
29
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
32
2
4
bias
4
30
3
V 1
16
torch.CudaTensor
1
32
1
1
4
31
3
V 1
17
torch.CudaStorage
32
-0.0332527794 0.0122848898 -0.0226091184 0.0190826226 0.00443748757 -0.00956844352 0.0117331799 0.00258728 0.0430913344 0.00669546518 0.0105603877 -0.00136660377 0.0112413997 -0.00950775295 0.00533034606 0.0317111537 -0.0241121482 0.0111194123 -0.00566241704 -0.00857343432 0.00618323684 0.000781538547 0.0113879181 0.0274273362 -0.0316061378 -0.0144351674 0.0108897975 -0.0010727169 0.0115182484 0.00198771921 0.006183316 -0.00446563587
2
2
kH
1
3
2
6
finput
4
32
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
33
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
34
3
V 1
17
torch.CudaStorage
9216
0.017740557 -0.0127522154 0.0653563663 -0.0116822468 -0.0360333137 -0.0235378873 0.0902850926 0.0981333032 0.178674445 0.109802283 0.128684998 0.000600347703 0.0242039524 -0.0288500395 0.128363818 0.122100562 -0.014659062 0.0888091326 0.104490325 0.0193674099 -0.113629296 -0.0657720938 -0.00986821856 -0.202242836 -0.0924219564 -0.171462625 0.0938629061 0.0310611539 0.00343201985 0.122485697 -0.0396630019 -0.0883922204 0.0177410543 0.0151052466 0.0330102146 -0.00508752558 -0.00232727197 -0.0968053043 0.00756591 -0.0478386544 0.0566374548 -0.0242089443 0.223163724 0.0584736913 -0.137790307 -0.179951727 -0.170192435 -0.00311981351 -0.0599024184 0.0776994973 0.00127884978 -0.110221893 -0.159348622 0.0501483157 0.0439180955 -0.0129283424 -0.0879365504 -0.0578762852 -0.00469014281 0.151607394 -0.0178709552 0.00253210007 0.0296539571 -0.00860391278 -0.0210000295 0.0119420402 -0.0228830595 0.16038397 0.0215680487 0.0328400806 0.0408741981 -0.187293395 -0.0260838605 0.0373394936 0.0343680419 -0.0397165976 0.00476080133 -0.0518608689 0.0599879809 -0.214309663 0.00822878722 -0.183848903 -0.127092719 -0.0375176668 0.142979458 -0.0577868447 0.0869937912 0.0624233559 -0.210599825 0.142899826 -0.0874747112 -0.170183465 0.0271998663 0.1622293 0.00791187119 -0.0370230302 -0.0143522797 -0.175366074 -0.0599188246 -0.030608369 0.00277227093 -0.208718404 0.0494211763 0.0731236264 -0.0259168223 0.110186085 -0.0104383761 0.148463339 0.0889958516 0.0472541861 -0.0205617771 0.0582886748 0.0958792418 0.0691251457 -0.0815446079 -0.0434337109 0.013464137 -0.033474572 -0.0644111335 -0.0386408307 -0.0815001354 -0.0474761426 -0.0809185579 -0.0414415598 0.0131246457 -0.0230828822 -0.0624281131 -0.0972769484 -0.00349776493 -0.0703687668 -0.0797482952 0.0804602802 0.11671602 0.0609620884 -0.0171237364 -0.0355718471 0.0421435945 0.00399605744 -0.0113758724 0.0635453984 -0.038797278 -0.103352696 0.0699459463 0.00021275437 -0.194495678 -0.0144843953 0.0717290789 0.0212521497 -0.0794845819 -0.0474266373 -0.0575163588 0.0977260992 0.187080964 -0.021793209 0.0337930061 0.0109159816 -0.0381913036 0.101604134 -0.134729922 0.0528742969 -0.0769446492 0.00936769415 0.0171907917 0.178554237 0.100074671 0.0158423018 0.0237615872 -0.151135817 0.144873902 0.235735938 -0.123532653 -0.112500161 -0.0992358699 0.0212586988 -0.120515533 0.0809028447 -0.0246783458 0.016346259 0.0816075504 0.0130281122 -0.0641713813 0.0252668895 0.0395533927 -0.0582889952 0.110138655 -0.0748813748 0.119215749 -0.0420284756 -0.011490603 -0.344648153 -0.051228229 0.0371121876 0.209581554 -0.0282798428 -0.168816611 0.0882878229 -0.0601467341 -0.0675185025 -0.195187122 -0.0798163265 0.0298689194 0.113496579 0.0078892922 0.115470193 -0.13774319 0.230787307 0.00565271778 -0.162924394 -0.0105795488 0.0288894363 -0.0252312794 0.0227768738 -0.103962094 0.134384513 -0.0955731198 0.191144884 0.150290161 0.0191911329 0.0189510789 0.0286848731 0.148264408 0.0395678841 -0.0389753543 -0.180538431 -0.0699439943 0.00730148191 0.0460283644 -0.17956765 -0.0965076312 0.0156686939 -0.0833340809 0.213335663 0.0504301824 -0.0789604038 -0.123988956 0.0764328241 -0.0294696726 0.0920631662 -0.077315785 0.0327225849 0.198236734 -0.13172996 0.000495680666 0.0903223008 -0.155218273 0.207197502 -0.112207927 -0.0246804692 -0.051996436 -0.101588354 0.173264891 0.134494364 0.010726559 0.000691405439 0.0571748763 -0.0180732124 -0.0447249338 -0.0014028619 -0.0583446324 0.135691419 0.0920382366 0.103497773 0.139256015 -0.144714668 0.0595338829 -0.091796048 -0.0095088901 0.00191592565 0.0840883479 
0.155374661 -0.0979900658 -0.101506807 0.0325736552 -0.0613240674 0.060868375 -0.071074903 -0.0690099299 0.208861262 -0.000493177387 0.125868216 0.0264600478 -0.0174927525 -0.184628025 0.161352277 0.0748661235 -0.0378646366 0.117772706 -0.116486482 0.108756095 -0.250924259 -0.108336903 0.177177146 -0.15540731 -0.101163484 0.0266577024 -0.149871692 0.131136283 0.141038924 0.0789146572 0.0874205008 0.089809902 0.0177101027 -0.124653347 -0.0680522695 -0.015221281 0.0761108026 0.0959688723 -0.0241949093 -0.146467179 -0.0953523889 -0.01116
2
5
train
5
0
2
10
gradWeight
4
35
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
36
3
V 1
17
torch.CudaStorage
9216
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
37
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
4
4
38
3
V 1
14
w2nn.LeakyReLU
3
39
5
2
5
train
5
0
2
8
negative
4
40
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
41
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
42
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
5
4
43
3
V 1
23
nn.SpatialConvolutionMM
3
44
17
2
4
padW
1
0
2
11
nInputPlane
1
32
2
6
output
4
45
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
46
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
47
3
V 1
16
torch.CudaTensor
1
64
1
1
4
48
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
64
2
4
bias
4
49
3
V 1
16
torch.CudaTensor
1
64
1
1
4
50
3
V 1
17
torch.CudaStorage
64
-0.000497418281 -0.00750701781 0.00431805244 0.00445734244 -0.0108872112 -0.00291305105 -0.00574175594 -0.00170683488 0.01103538 -0.00744233746 0.000525828509 0.00375575037 0.00387095474 -0.00342334295 -0.0155102229 0.00314620626 0.00181395561 -0.00605141744 0.00744405249 -0.00846417248 -0.0117503032 -0.00079474051 0.00127779262 -0.00148972543 -0.00181994576 -0.0115679633 -0.00514969649 0.000909990689 -0.00880775228 -0.00182634837 -0.0114975367 -0.00343938731 -0.00358936796 0.00646765111 0.00173867971 -0.000812783546 0.00408669049 -0.00974996854 0.0010241901 -0.00882970728 -0.00357325166 0.000411478657 -0.0178794004 0.000391633832 -0.00161181437 -0.0204118565 -0.00638532778 -0.0108952848 -0.0162763074 0.00339695951 -0.0261583328 0.00873641297 -0.00468947832 -0.00910240132 -0.000352561823 0.00294683827 -0.00890872441 0.0066677588 -0.00123300322 0.00910835061 -0.00243901508 -0.000234173101 -0.00124906818 0.000975484611
2
2
kH
1
3
2
6
finput
4
51
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
52
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
53
3
V 1
17
torch.CudaStorage
18432
0.00580692571 -0.0414244272 -0.152294114 -0.00170390098 -0.0279033091 -0.135018155 0.0467667878 0.0539844856 0.0722600818 -0.026270695 -0.0149076525 -0.0638380051 -0.0479183011 0.0405332819 -0.00484349951 -0.0294965692 0.148841307 -0.0305838753 -0.0440944918 0.0640021861 0.0362358727 0.020730719 -0.0329159461 0.0657987371 0.0285851378 0.00371854217 -0.0209012516 0.0113110552 0.0919295251 -0.0986350253 -0.039156042 -0.0121384477 0.0421599336 0.04648057 0.0343412086 0.0820600241 -0.0180674121 0.258939058 0.137669295 -0.242367223 -0.110660113 -0.125722438 -0.0890714824 -0.00459027942 -0.00562986406 0.126868173 0.0525217839 1.69217892e-05 -0.0385521837 -0.0279014651 0.0632754937 -0.0438392609 0.088779889 -0.0282180328 0.0549461395 -0.0373392776 0.0410349593 0.00101732009 0.0256311726 -0.0148119461 -0.0119512705 0.0647633597 0.103615016 -0.00437162397 0.13894619 -0.0256144125 -0.0746883377 0.0467902385 0.0422870815 0.0514222793 0.0232024994 -0.0965623707 0.00596423307 -0.04890389 -0.00635991246 0.0094088614 0.0229541864 -0.0506250821 -0.0177834909 0.00207740976 0.0675913915 0.0609928742 -0.0989945009 -0.0359361246 -0.0806874633 0.0814657435 0.10958419 -0.0321524329 -0.122065529 0.00868235901 0.0158598851 0.0121745868 0.087885581 -0.0380748548 -0.0852723941 -0.0924992636 0.0629567653 -0.0426828302 0.161970541 0.123957276 0.104311414 -0.0963594243 -0.0575460084 0.0639552623 -0.0499134138 -0.0544369444 -0.04181825 -0.0153634809 -0.08313936 0.0686011538 0.0565891489 -0.0311857462 0.07661511 0.0261277948 -0.0221761875 -0.0452837199 -0.270510107 0.0158873033 -0.0370684117 -0.106963739 0.0695485994 -0.0560539551 -0.0312379301 0.0450551026 0.0201310422 0.0144835413 -0.0473846085 0.100743793 -0.099377729 0.0366382711 0.0378319956 -0.0554587282 -0.0143389609 0.0848590508 0.0628367662 -0.0273563489 -0.0442955233 0.00508755678 0.0146907764 -0.0848465189 -0.127163216 0.156445816 -0.0570359267 -0.0831248537 -0.00188195473 0.0153140975 -0.0340033025 0.00102596742 -0.00798502006 -0.065263927 -0.0365045927 0.0120407511 0.0479852557 0.111539379 0.0270318538 -0.00135846448 -0.0899699703 0.0387204029 0.105756633 -0.106631733 -0.0497673601 -0.113278106 -0.140246123 -0.0540163256 0.00789677445 0.0957456604 0.149505958 0.0361416824 -0.0242772792 0.040470615 0.0227889717 0.035995122 -0.0684094578 -0.094002068 -0.0318228565 0.0467452928 0.0763497949 -0.0180893205 0.0524981171 -0.195532441 -0.182571262 -0.0921338499 0.0454329737 0.00349277514 -0.0797292143 -0.0217634682 0.0151616801 0.0185396001 0.0539813973 -0.0665404275 -0.0184022952 -0.0567215085 0.177029431 0.0132404594 -0.0162221789 0.0251955297 -0.0778411403 -0.0943416953 -0.00917925499 -0.155407161 0.15934518 0.0842524394 -0.0614000335 0.0384992398 0.129085883 -0.0663340464 -0.0269088503 -0.00434973883 0.0544984899 -0.114547104 -0.0255495664 0.0190253742 -0.0556144156 -0.0600439049 0.0571349151 0.0187335182 0.036506556 0.0596305206 -0.0218853429 0.00962598529 0.0501697361 -0.0972179845 -0.100274727 -0.0536566898 0.0418357588 0.0364614576 -0.108654693 0.0471447185 0.0176880732 -0.076986596 -0.128088608 -0.0565236844 -0.100989759 -0.073118329 0.127905563 0.0234882999 0.0130915474 0.111369543 0.0459470488 -0.18366605 0.0886353627 -0.0192512497 -0.0579845347 0.195762753 -0.0184051581 0.0485309958 -0.0454763211 0.0176002514 -0.0151099693 0.0194681641 -0.0980668291 0.0479954481 -0.00660562888 0.0429306887 0.0385738835 -0.131910205 0.125984013 0.0484747104 0.112733223 0.000371853821 -0.0803661048 0.0966762975 -0.0874112099 -0.0876546875 0.013535006 0.101950608 
0.0563522354 -0.0984219313 -0.0355243012 -0.0611726642 0.0642947927 0.0167563185 -0.113181353 -0.0188462269 0.0249420702 0.0305753164 0.028409984 0.0170990583 0.0538676605 -0.118623219 0.0218772441 0.00925702602 0.167738885 0.0611135811 -0.0937258154 0.0313254222 0.173658952 -0.0862518698 0.0454094447 -0.0080683846 0.00362654286 -0.0517780781 -0.151251927 -0.0490230508 -0.0334663205 -0.0607771911 -0.0846318454 0.0357940607 -0.109044939 0.171056628 -0.156602144 -0.0983566642 0.122317001 0.101564609 -0.11340563 -0.0811678544 0.0361424237 0.016771
2
5
train
5
0
2
10
gradWeight
4
54
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
55
3
V 1
17
torch.CudaStorage
18432
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
56
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
6
4
57
3
V 1
14
w2nn.LeakyReLU
3
58
5
2
5
train
5
0
2
8
negative
4
59
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
60
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
61
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
7
4
62
3
V 1
23
nn.SpatialConvolutionMM
3
63
17
2
4
padW
1
0
2
11
nInputPlane
1
64
2
6
output
4
64
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
65
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
66
3
V 1
16
torch.CudaTensor
1
64
1
1
4
67
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
64
2
4
bias
4
68
3
V 1
16
torch.CudaTensor
1
64
1
1
4
69
3
V 1
17
torch.CudaStorage
64
-0.00615406455 -0.00119863905 -0.0142061478 -0.00432105176 -0.00918286201 -0.0147218676 -0.00405015936 -0.0194947142 -0.00878369715 -0.0044124811 -0.00746293319 -0.00192394038 -0.0068479795 -0.00700129336 -0.00868807454 -0.0138463555 -0.00769102899 -0.00858625304 -0.00492772181 -0.0121936686 -0.00693520857 -0.00791009422 -0.00821356382 -0.00781429093 -0.00217574113 -0.00857519358 -0.00458916556 -0.00429539056 -0.00329310657 -0.00501032872 -0.00744024385 -0.00742011797 0.00263696723 -0.00547094084 0.00201249844 -0.00854781363 -0.00535255997 -0.00256216223 -0.00138747145 -0.00369037502 -3.0162686e-05 0.000870355288 -0.00539692631 -0.000767338206 -0.00625611655 -0.00138010585 -0.00261524133 -0.00220555486 -0.00556404423 -0.00684869895 -0.00939573441 -0.00385987409 -0.0286292676 -0.00543430634 -0.00305944588 -0.0051061376 -0.00555490283 0.00038610617 -0.000838006963 -0.00420844415 -0.00510518486 -0.00387054635 -0.00485834386 -0.00564613566
2
2
kH
1
3
2
6
finput
4
70
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
71
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
72
3
V 1
17
torch.CudaStorage
36864
-0.0144594349 -0.0169598497 -0.0681581274 -0.186449349 0.0652370453 0.00656394102 -0.0623901859 0.0374456942 0.0478968993 0.0690207183 0.041482091 0.134866834 -0.0281203613 -0.0166830681 -0.128446281 0.121193804 -0.111018948 0.0133738173 0.0568074994 -0.00452324143 -0.129435092 -0.0910397395 0.0168912914 0.116348892 -0.180139139 -0.0389692187 -0.0232122671 -0.0166746117 0.0788569003 -0.116358511 0.0369908698 -0.084483169 -0.045644924 0.099816896 -0.0805572867 0.0245570503 -0.0716914088 0.0370230861 -0.0390119366 -0.117114633 -0.0541878529 -0.0923837796 -0.0108750928 -0.0189886633 0.0232492145 -0.0265827589 -0.0804779604 0.0822921991 0.0995258763 -0.0621127784 -0.108716428 -0.0254821815 0.025636917 0.0466265902 -0.00323425536 -0.00975827686 0.0714994147 0.0424475446 -0.140598044 -0.190623105 0.0821738914 0.0903710127 -0.0608677678 -0.0534625575 -0.0249282736 0.0687592402 -0.0791572034 -0.0013783226 -0.00799037889 0.140130669 0.0553687364 -0.0627483353 -0.0221495554 0.0951200649 -0.024709696 -0.0783751756 0.0607544892 -0.063119255 -0.0817904323 -0.0698124617 0.135572791 -0.0389915667 0.00446935324 -0.0933350772 0.0813700631 -0.0900100023 0.0891276896 0.0759762451 0.0214290842 -0.0686510354 -0.0309136193 -0.123343758 -0.0126513662 -0.175055385 0.010378656 -0.0975579917 -0.0998675898 -0.0959913731 0.033363454 -0.103602111 -0.0833872035 -0.134746835 -0.0573449507 0.168854803 -0.0328240357 -0.109475031 -0.120554961 -0.0286561921 -0.045316495 0.0076709385 -0.102988414 -0.049052529 -0.050046932 -0.0105939936 -0.270847052 -0.0409061499 0.0711176395 0.097719498 -0.034039516 0.00722879823 0.163464144 0.0151490383 0.00796840806 0.0543935224 0.121738993 0.134974211 0.0874029249 -0.0428910628 -0.128271282 0.00259572407 -0.0520407632 -0.0314186327 -0.0979823396 0.109905519 -0.0682129636 -0.279303521 -0.0584229752 0.0060105701 0.10854125 0.037146382 0.0830220506 -0.167353958 0.0345925912 -0.0452634245 0.0348180421 -0.0582361035 -0.0621163808 0.0292500444 0.0463077053 0.0377813801 -0.0144574335 0.00395988533 0.129189357 -0.00349222822 -0.0246277824 0.161676511 -0.104664758 0.00117854797 -0.0160785802 0.0757940933 0.193549082 0.113491103 -0.0650186613 0.0900016725 -0.0139918281 -0.0403540842 -0.0229105391 0.0358463787 -0.0789285675 -0.115612909 0.0539280362 -0.117236622 -0.101044998 0.165553063 -0.0777892917 -0.10559243 -0.0471817777 0.00837867614 0.0339251794 -0.184351265 -0.169653744 0.20217523 -0.052092541 -0.107610568 0.0132880006 0.00538756 -0.0264015999 -0.0255730432 0.0770911798 -0.0617247857 -0.00613114284 0.117371447 -0.0283626709 -0.0346790291 -0.230354249 0.0783134028 0.125119075 -0.187900096 0.199364319 0.143086612 -0.0226341505 0.146089062 -0.148985863 -0.20443444 0.163906366 -0.0893093944 -0.0576494038 0.0980084464 0.0173851401 0.0434832536 -0.0177633893 -0.0960629284 -0.0446474962 0.19917722 0.153600767 0.0611421727 0.0738207027 -0.0128683951 -0.0112908985 0.0680049956 0.00371218543 0.00503659714 -0.00019429448 0.0499799661 -0.00390871568 -0.0694023743 -0.126159027 -0.208149895 0.0639125779 -0.049795948 0.0144455908 -0.00622350629 -0.118217215 0.0757025406 0.0172230899 0.0468908101 -0.109173059 -0.111921452 0.0401935503 0.0184888616 0.0187996682 -0.0467174239 -0.0413386114 -0.145184129 -0.097943373 -0.00456151925 -0.112134635 -0.0138005307 -0.0599178337 -0.00525773177 -0.0466657691 -0.0952619538 -0.0668549091 -0.126768708 -0.0404397696 0.120851412 -0.0337907299 -0.0391380414 -0.0147025203 0.0423668548 -0.119272582 0.0351773463 -0.0721297488 -0.0936758369 0.0262438674 0.0216190591 0.0612214506 
0.0836079717 0.0879672021 0.0486146063 0.0203755777 -0.122466758 -0.0102807917 0.0118138203 0.0188789256 0.0101454556 -0.0312095657 -0.0407936163 0.196714193 -0.0802895352 -0.133714899 0.0822643861 -0.0350923352 0.00347328396 -0.00482697133 -0.183631659 -0.0195364878 -0.0429843292 0.146042347 0.0953915045 0.0630129203 -0.0133145414 -0.00624469109 -0.0112894932 0.158517733 -0.0683189034 0.0263257939 -0.00236407341 0.00567549421 -0.016939532 0.0637087151 0.0544565842 0.0674437955 -0.0440653749 -0.0368555635 0.0140053183 0.0389649719 0.0
2
5
train
5
0
2
10
gradWeight
4
73
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
74
3
V 1
17
torch.CudaStorage
36864
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
75
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
8
4
76
3
V 1
14
w2nn.LeakyReLU
3
77
5
2
5
train
5
0
2
8
negative
4
78
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
79
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
80
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
9
4
81
3
V 1
23
nn.SpatialConvolutionMM
3
82
17
2
4
padW
1
0
2
11
nInputPlane
1
64
2
6
output
4
83
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
84
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
85
3
V 1
16
torch.CudaTensor
1
128
1
1
4
86
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
128
2
4
bias
4
87
3
V 1
16
torch.CudaTensor
1
128
1
1
4
88
3
V 1
17
torch.CudaStorage
128
-0.00484697055 -0.00543596176 -0.00644996809 -0.0534761436 -0.00571516389 -0.00328024058 -0.00333266729 -0.00496959221 -0.00300167128 -0.0329714939 -0.00327529781 -0.00479285046 -0.00842487253 -0.00326085952 -0.0796347186 -0.00645183679 -0.00751156965 -0.00435992144 -0.00290005631 -0.00421743281 -0.00719471509 -0.00690480648 -0.00719699683 -0.0101769259 0.0102219684 -0.00428754324 -0.00194046844 -0.0142169697 -0.00462404359 -0.00370686292 -0.00753181009 -0.0034638925 -0.00629598601 -0.00337443571 -0.00495406846 -0.00227904809 -0.0034916352 -0.00199882034 -0.0034218221 -0.00540725794 -0.00337043148 -0.00368155935 -0.00345609058 -0.00573889166 -0.00517528085 -0.00393173657 -0.00332114892 -0.00465431157 -0.00629916042 -0.00359075144 -0.00339672645 -0.00417032 -0.00560023449 -0.00716971001 -0.00558870099 -0.00293285702 -0.00585587835 -0.00575643266 -0.0043735886 -0.00785952527 -0.00574294617 -0.00614715368 -0.00491121691 -0.00698732492 -0.00460147019 -0.00837893877 -0.00387983373 -0.00447750511 -0.00769083062 -0.00400065491 -0.00369932805 -0.00525109842 -0.00624566758 -0.00624118838 -0.00305958604 -0.00165638758 -0.00385711133 -0.00666233245 -0.0075867828 -0.00416760379 -0.00517393183 -0.00572628994 -0.00330813369 0.00116135308 -0.00369474106 -0.00391311897 -0.00602078391 -0.00455040205 -0.00292753289 -0.00663371477 -0.00926616415 -0.00546039036 -0.00549140107 -0.00332406233 -0.00181599054 -0.00309119094 -0.00369919627 -0.00654069521 -0.00287238462 0.000547496078 -0.00478264177 -0.0045996909 -0.00629951246 -0.00748757785 -0.00497833872 -0.00445675012 -0.00914269593 -0.0062220837 -0.00473756017 -0.00325802225 -0.00417468231 -0.00455435645 -0.005219616 -0.00437410502 -0.00220596185 -0.00490419054 -0.003945373 -0.00549722416 -0.00252784207 -0.00759313209 -0.00484434515 -0.00303161261 -0.00439662021 -0.00483324192 -0.00243151211 -0.00282881968 -0.00197068136 -0.00498576136
2
2
kH
1
3
2
6
finput
4
89
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
90
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
91
3
V 1
17
torch.CudaStorage
73728
-0.0280291419 -0.0681611523 -0.051976528 -0.0940863341 0.000499520509 0.0198900867 0.0836283788 -0.0190468356 0.092938751 0.0776709244 -0.0492751524 -0.0173792597 -0.0366408676 -0.0550173074 0.0325349271 0.0816485062 0.048214756 0.0267374646 0.129467279 0.0390643552 -0.010308777 -0.013643722 0.006968081 -0.0364220105 -0.0634082034 -0.068395704 -0.0662196726 0.00596299255 0.0902096406 0.086301893 -0.0146669783 -0.115998425 0.0464330316 -0.0524683036 0.0822807997 0.0630219728 -0.025144713 0.0054300474 -0.152135134 0.00817057956 0.0449267961 -0.00763133634 -0.0258624665 -0.0512146354 -0.0049961987 -0.110319652 0.018102888 0.0874356851 -0.088812232 0.0294584185 0.091209285 -0.00626622047 -0.0757882595 -0.0378565565 0.0169807244 -0.10632398 -0.0700619817 -0.0183063485 -0.0752121806 -0.141850278 0.0509602204 0.00980460364 -0.114266276 -0.035277959 -0.0209735315 -0.0700760931 -0.0318972021 -0.0429587476 0.0320173725 -0.0730615333 0.00385055249 0.0347448699 0.0579539612 0.0158168152 0.0241426118 -0.108265206 0.00122565019 0.0319506228 -0.12960498 -0.0565451682 0.0381621011 0.0723494291 0.0521815233 0.143392846 -0.0501729324 -0.000874299381 -0.0217109397 -0.00682379305 -0.0315404572 -0.0769655183 0.0136105483 0.0442162491 0.129754618 0.023861764 -0.0283933617 0.0101897297 0.0137708755 -0.0988339707 0.0416015275 0.102386266 0.000981406891 -0.0713535175 0.0441695824 -0.025040362 -0.200163826 0.0533721 0.0224675275 0.0174378958 -0.0883033797 0.14944832 0.0402795412 -0.0744295716 0.129441202 -0.0298653096 0.0642501935 -0.107872412 0.0337501355 -0.101604305 -0.0975290909 -0.0875216797 0.0696660876 0.0689189658 -0.0537037142 0.0145326583 0.0520786904 -0.0704277679 -0.0106135672 -0.0327203795 -0.0464584269 0.0201721489 -0.0728951618 -0.0837575048 0.0677058622 -0.0290962886 0.0623792857 0.0141337896 -0.00390608003 0.0652443171 0.024478551 -0.00525972946 -0.0195960663 -0.0702616945 0.0262225866 0.0986986458 0.0401345193 0.00914302655 -0.0155188795 0.0169092286 -0.0627921075 -0.0803779364 -0.0149059072 0.0532673784 -0.0368798189 -0.0293110684 0.0242020674 -0.0409005359 0.00586924562 0.0124897482 0.0536135398 -0.0750142634 0.0286792964 0.00583327468 -0.00664191134 0.0728538632 0.11027021 0.0135181351 0.0362414755 -0.0850063711 0.0298274569 -0.024422938 -0.150387719 0.0552784316 0.00473189633 0.0519082583 0.0735446438 0.0376533158 0.0167626031 -0.00292689772 -0.0686798319 0.069124341 -0.0356716476 -0.108403258 0.0212325752 0.0967762172 0.0065472764 0.0558729433 0.0384631939 0.0133285336 0.120308124 -0.0616369881 -0.090456672 -0.110140055 -0.00752859004 -0.00613981625 0.00718666846 -0.100081541 0.031138191 0.0292022545 -0.0323591419 0.0253583733 -0.113937452 0.0962464139 0.0137663456 -0.0193774421 0.0801390111 -0.00298580411 0.0258574951 0.049225606 -0.0485028364 -0.0252963025 -0.0708929077 -0.0240084585 -0.01869574 0.0464397594 -0.0297218878 -0.049344793 -0.0660416633 -0.0898148939 -0.104114488 -0.0617812425 0.00809426606 -0.0317240097 -0.0273384452 0.0792538077 0.00623104069 0.0424447805 -0.0523020178 -0.0681630373 0.0444977507 -0.0292052086 -0.115791962 0.014787158 -0.0451280363 -0.0161283817 -0.0501080863 -0.0109754028 -0.0221889149 -0.0346994586 -0.00812871568 -0.0159449596 -0.0160907507 -0.0208837111 -0.0239322223 0.0150247412 -0.0188441873 -0.181840345 0.0732572451 -0.0501600206 -0.0491529442 -0.0669494271 -0.0692667067 -0.0632623658 0.0254566744 0.0522909537 -0.0244056154 -0.00850801449 -0.0384208299 -0.0299854036 -0.00864452217 -0.00662456732 0.0767730847 -0.0828913376 -0.118212037 -0.0398523733 
0.0336566307 -0.0438776761 0.0795387551 -0.0445184372 0.0284670014 -0.00630883593 -0.120041691 -0.101582497 -0.00670617865 -0.0706614032 -0.0794130564 -0.0800084546 0.00437659584 -0.00234594778 0.00614113035 0.0220312979 0.0374335796 -0.145956919 -0.0476102717 -0.0965939984 -0.0485358648 -0.0517308265 -0.00872513 0.024883667 -0.00893685222 0.042313125 -0.030942373 0.0183817763 0.00505160494 0.0368615687 0.00460823812 -0.00019724411 -0.0151835959 0.0376681685 0.00669487705 -0.179852143 -0.115954898 -0.00464914087 0.00332878157 -0.0225621052 0
2
5
train
5
0
2
10
gradWeight
4
92
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
93
3
V 1
17
torch.CudaStorage
73728
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
94
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
10
4
95
3
V 1
14
w2nn.LeakyReLU
3
96
5
2
5
train
5
0
2
8
negative
4
97
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
98
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
99
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
11
4
100
3
V 1
23
nn.SpatialConvolutionMM
3
101
17
2
4
padW
1
0
2
11
nInputPlane
1
128
2
6
output
4
102
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
103
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
104
3
V 1
16
torch.CudaTensor
1
128
1
1
4
105
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
2
dW
1
1
2
12
nOutputPlane
1
128
2
4
bias
4
106
3
V 1
16
torch.CudaTensor
1
128
1
1
4
107
3
V 1
17
torch.CudaStorage
128
-0.0359542966 -0.00875733793 -0.00199514348 -0.0230031721 0.0262361728 -0.0198646486 -0.00775036076 0.0175496675 -0.0217739157 -0.00435825298 -0.00569268176 -0.00361043634 -0.0179202314 -0.00629017223 -0.0174324512 -0.00448334916 -0.0158105586 -0.02067356 -0.00228544651 -0.0112311868 -0.0256539155 -0.0275696237 -0.00547963101 -0.00160513283 -0.00127979333 -0.000391748064 -0.0285296515 -0.017269209 -0.0142066674 0.000898262195 -0.00983283296 -0.0170431659 -0.00729628373 -0.018964896 -0.0282980967 -0.0187596492 -0.0161691569 -0.00364947715 -0.0184815358 -0.000961978571 -0.0179273598 -0.00609348295 0.00955065619 -0.0213712342 -0.0078135971 0.00312555349 -0.00917218067 -0.0131798796 -0.0171428528 -0.00813257508 -0.00413069641 0.00206672843 0.00086015783 -0.00670221169 -0.0200054143 -0.0203091688 -0.00792848133 -0.0223116837 -0.026123004 -0.0219703633 -0.019766707 0.00193509227 -0.00711801322 -0.0286102574 0.00945514068 -0.0236286875 -0.00471310597 -0.010117528 -0.00409212383 0.000614054268 0.00067003211 -0.0042591258 -0.0256554335 -0.0136005012 -0.00317145162 0.00158509274 -0.00322809862 -0.00577842863 -0.0608920492 -0.0120086642 -0.0237921402 -0.0282487739 0.011994564 0.00221048645 -0.00935304165 -0.00600907719 -0.0190780628 -0.0189184919 -0.0188345686 0.0117257796 -0.0275828019 -0.0109819518 0.00697251316 -0.00445747189 -0.00216594548 -0.0244845431 -0.0184176359 -0.0252559576 -0.00789795257 0.0164397489 -0.0132111032 -0.0177875832 0.0532361344 -0.00493637752 -0.00627674954 -0.0072434633 -0.0219290294 -0.0127624264 -0.00297946134 -0.000277394487 0.00437465077 -0.00669830758 -0.0186769441 -0.00431000767 0.00902436953 -0.0254697632 -0.0111477077 -0.00293605286 -0.0321335644 -0.00308535318 0.0118633434 -0.00416353066 -0.0214639511 -0.011145399 -0.0140300132 -0.00954174157 -0.010235101 -0.0191434231
2
2
kH
1
3
2
6
finput
4
108
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
109
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
110
3
V 1
17
torch.CudaStorage
147456
-0.102338716 -0.0580728799 0.0188191496 0.0963510126 0.117048621 0.0410754941 -0.000908333866 0.0117182331 -0.0258993655 0.00608388195 -0.0550822057 -0.0154454894 0.0784685537 -0.0326428935 0.0435534939 -0.0410360172 -0.0186926443 -0.00629037619 -0.0481806323 -0.0813235417 -0.012821856 -0.0823911279 -0.0132897655 0.0310091022 -9.36446668e-05 0.0398209579 -0.0120583838 -0.0940939784 -0.0110125653 -0.0173730329 -0.108717442 -0.0310662612 -0.048765447 0.00393699901 0.0604683459 0.0390752368 -0.0528409183 -0.00662275637 0.010188139 -0.0680701882 0.0455582887 -0.00569743896 0.0487454124 -0.0280199852 0.0315543748 0.0127331493 -0.0993248001 -0.0704130009 0.00670079794 -0.0823101178 0.0377970636 0.0361985639 -0.0528996363 0.0590057969 -0.0251032151 -0.111098059 0.0342819095 -0.0585974902 -0.0696500167 -0.0582085922 -0.105262175 0.000786235556 0.0107494025 -0.00490643457 -0.0187917016 -0.0882210806 -0.0464165471 0.00384645932 -0.0620848201 -0.08672335 0.0198861267 -0.0265345611 -0.00587457139 -0.0193479452 0.0273614004 0.0146678509 -0.0821426213 0.0328512043 0.0320309028 -0.0134895714 -0.00273056468 -0.0454011746 -0.0254785828 -0.0231192894 0.00166923495 0.0651001185 0.0890162587 -0.00292890356 -0.0350192264 -0.0799802095 -0.127356961 -0.0281870756 0.0184649415 -0.048674766 -0.0398717187 -0.0340443812 -0.0853252634 -0.0545461439 0.0349940397 -0.0111286733 0.0659281909 0.00138483837 -0.0263127573 -0.0589617118 -0.0384242721 -0.0227942802 -0.0644063652 -0.053461872 -0.0306248832 -0.00879806466 0.0166677367 0.0028791381 0.0320714638 0.000102271661 0.0323820412 0.0189913046 -0.01712122 0.00785127003 0.0717265978 -0.00456392393 0.048505418 -0.0548756048 -0.0646810159 -0.0614735037 -0.0266460851 0.0103966501 0.000120065495 0.0775522962 0.0382095091 0.000847223972 0.0552804805 0.0301470943 -0.0163758192 -0.0294185318 0.00262097968 0.00275992393 -0.00215600431 -0.073546879 0.00853693113 -0.0449119806 0.00378321717 -0.0181628149 -0.00534031726 0.00536462525 -0.174596086 0.0268056579 0.0683989674 -0.00536954496 -0.0359325409 -0.0953298882 -0.0255901217 -0.0705077052 0.00942522101 0.0705976114 0.0581716076 -0.04685295 0.0321637355 -0.0288228858 -0.0461895056 -0.0728249997 -0.034362983 -0.00661877031 0.0960759223 0.00171218556 -0.0373219587 -0.0412443243 -0.0147114163 -0.035698995 0.0332785696 -0.00967663713 0.0612399802 -0.0691213235 -0.0176678784 -0.0306798741 0.0111587467 0.0140417414 -0.0391090661 -0.0441998169 -0.0344364271 -0.0676014796 -0.0160920788 -0.136424169 -0.0460195728 0.0925896987 0.0648560375 0.118055321 -0.0176533442 0.0311933272 -0.0112678427 -0.089250274 -0.0337938927 -0.0142360181 -0.0408538617 -0.0295443814 0.0522743352 -0.00716825994 -0.0854670182 -0.0262751915 -0.0171848238 -0.0357304215 -0.0233433153 0.0135688027 -0.0494497307 -0.0042501092 -0.0578313954 0.0273454618 0.0848569125 -0.0891007632 -0.069837831 0.044810541 0.130403459 0.088812083 0.0679683909 0.0575698018 0.0701644644 0.102282017 0.0207210332 0.0529844724 0.0383775085 0.0193555728 0.0410649627 0.021507455 0.0923203528 0.0366265252 -0.0512819812 -0.0966896489 -0.0245271735 0.0383932702 0.121924974 0.0684771165 0.0130907297 0.0042228424 0.00284422422 -0.0144858612 0.0761248991 0.0488803908 -0.0329126455 -0.00416032039 0.0321523324 0.00625000382 -0.0221919604 -0.0470254682 0.0506513491 0.0135683343 0.080718942 0.022069443 0.0185045172 0.0288125649 0.0963874385 0.0355092995 0.0260103587 -0.013173596 -0.0677245632 -0.0641080737 -0.0036950116 -0.0339470208 -0.0869487077 -0.0203954801 -0.00657695299 0.0289771315 0.0789048672 
-0.0501712337 0.0330751576 -0.0927507281 -0.057266064 -0.0355818272 -0.0121603413 -0.0562102981 0.0164209679 -0.0210733842 -0.0344370119 -0.0291337501 0.027777249 0.00233435584 -0.00762727484 -0.0209535938 -0.0404832102 -0.0413836017 0.00154407579 0.0870274007 0.0769770741 0.0306865554 -0.0914810225 0.0189763289 0.0491151586 0.0108392294 -0.0305102617 -0.0162025373 -0.0745340139 0.029034894 0.0388157554 -0.0653211251 -0.0182831585 0.0166664217 -0.00657831645 0.0711418241 0.0845278203 0.0319779478 -0.0147071443 0.0800903738 -0.0313892923 0.0
2
5
train
5
0
2
10
gradWeight
4
111
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
112
3
V 1
17
torch.CudaStorage
147456
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
113
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
12
4
114
3
V 1
14
w2nn.LeakyReLU
3
115
5
2
5
train
5
0
2
8
negative
4
116
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
117
3
V 1
16
torch.CudaTensor
0
1
0
2
6
output
4
118
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
13
4
119
3
V 1
23
nn.SpatialConvolutionMM
3
120
17
2
4
padW
1
0
2
11
nInputPlane
1
128
2
6
output
4
121
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
122
3
V 1
16
torch.CudaTensor
0
1
0
2
8
gradBias
4
123
3
V 1
16
torch.CudaTensor
1
1
1
1
4
124
3
V 1
17
torch.CudaStorage
1
0
2
2
dW
1
1
2
12
nOutputPlane
1
1
2
4
bias
4
125
3
V 1
16
torch.CudaTensor
1
1
1
1
4
126
3
V 1
17
torch.CudaStorage
1
0.0155135468
2
2
kH
1
3
2
6
finput
4
127
3
V 1
16
torch.CudaTensor
0
1
0
2
6
weight
4
128
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
129
3
V 1
17
torch.CudaStorage
1152
-0.926220119 -0.214996397 -0.372250259 0.038935177 -0.0615110807 0.822556734 0.286043912 0.597079456 -0.641955793 -0.592794538 0.958044887 -0.0302086733 0.550047755 -0.16167973 -0.951080739 -0.253790706 -0.163210571 0.645737231 -0.0938901007 0.0219216198 0.145658195 -0.0450992547 -0.483620316 0.526854336 -0.51127696 0.785240054 -0.343326688 -0.100868136 0.143092617 0.163633808 -0.514183342 -0.140756875 -1.11663842 -0.143148556 0.728977323 0.505918145 -0.429059803 -0.562808871 0.304971546 0.896603227 -0.212511823 -0.606179476 -0.302019447 1.00996339 -0.0861074328 -0.429586649 0.403850734 -0.195179641 -0.0198243093 0.287101984 0.00761117646 0.175159022 -0.348772407 0.0598665141 0.285232425 0.534559786 -0.671950758 -0.0987206921 -0.599982321 -0.246356353 0.183696866 0.44075641 0.14159362 0.598527431 -0.401841819 -0.182091653 0.188356414 0.459032625 0.42649731 0.586627483 0.408952683 0.803086877 -0.10946279 0.482523799 -0.283389777 0.125581563 -0.179843768 -0.937302947 -0.242436469 -0.547035277 -0.0821166039 -0.000185470388 0.0994515195 0.00275809434 0.0364742167 -0.0739749223 -0.127779648 -0.0507329069 0.398988485 -0.337873876 -0.243484274 -0.0808575749 -0.138994038 0.615754366 0.116339266 -0.657850087 0.0716011897 0.35570845 0.0475971103 -0.583096206 -0.0357835181 0.0116871521 -0.252684623 0.097515814 0.437057227 0.170340955 0.146309376 0.00779882632 -0.215657994 0.700009167 0.109106362 -0.0195020121 -0.115906104 0.78657192 -0.207830653 -0.434971303 -0.531513155 -0.118092515 1.0574019 -0.52113229 -0.297916293 0.26560542 -0.329635322 -0.0951333195 0.191173553 0.35803476 -0.827917814 0.0681333914 -1.06351984 0.0965640917 0.0833633766 0.11580269 0.075609006 0.312657177 0.271849483 0.391442448 -0.478853256 -0.141031593 0.350239664 0.581375301 0.178651944 -0.829025149 0.547985613 -0.642818093 0.449065953 0.166697487 -0.330766618 -1.22934794 0.700447857 -0.0701570064 -0.204099402 0.226399153 0.247032285 -0.0174348336 1.0078547 -0.214352533 0.642157853 0.185873285 -0.624391377 -0.332158506 -0.381932467 -0.633916497 -0.136397645 -0.087514326 0.216631904 0.233600304 -0.23811847 0.478633344 -0.197686151 -0.0202206746 -0.278351277 0.769041002 -0.400952697 0.0968008265 0.336189419 -0.575200796 0.438648224 -0.3759754 -0.32051006 0.20388785 -0.600788832 0.31214425 -0.283400476 -0.254615366 -0.146873549 -0.63660115 0.0765489787 0.378999412 0.197462693 0.370964527 -0.543355048 0.200523466 0.173068464 -0.0140061211 -0.534938753 -0.625154138 0.064660497 -0.763451099 -0.859583974 -0.13942638 0.17434749 0.32611692 -0.0474941991 0.217825457 -0.224350557 0.334193051 0.244785637 0.166986778 -0.151273444 -0.387211502 -0.669655144 -0.229193553 0.0811776742 0.682413042 0.525756419 0.0713281929 -0.133232698 -0.178329617 0.22585015 0.297647417 -0.22272867 0.168550208 -0.102757007 0.268084139 -0.30693379 -0.303984761 -0.294178188 0.241806567 0.0376468562 0.156442136 0.84028089 -0.35989958 0.242776766 -0.559467793 -0.0484599397 0.139452115 -0.387388408 0.556863248 -0.280523449 0.301773876 0.152444825 -0.454530895 0.316184014 0.499027997 -0.0326605327 -0.319401711 0.411989152 -0.522607446 0.00737683754 0.147433311 -0.0701675415 -0.0561395772 0.0948357806 -0.111877188 0.175616726 0.377284974 -0.38480109 -0.22205539 0.250006407 0.019542722 -0.146269903 -0.131585002 0.0310425758 -0.0706066638 -0.388446122 -0.316121906 0.367956281 -0.199679703 0.0461040623 0.570214152 -0.20475781 -0.662368417 -0.272643119 0.463187993 -0.245591506 0.467430919 -0.670163214 0.467997015 0.537063658 0.421324402 -0.090896748 -0.0274266805 
-0.162823662 -0.499452114 -0.0352917016 -0.939256251 -0.16201362 -0.0837981552 -0.00185187673 0.14058888 0.934961796 -0.669778109 -0.0848949924 0.111436866 0.153583393 0.0994485766 -0.781505048 0.119708605 0.0641608015 0.611078858 -0.194725543 -0.492576987 -0.237195194 0.098524034 -0.0489094704 0.147233814 0.196363285 -1.45757139 -0.192883641 -0.0253027212 0.513103843 -0.156462491 0.625477612 -0.0718203783 -0.51587671 -0.435968131 0.217860907 0.369125068 0.514770091 0.0966287702 -0.566249728 -0.33773607 -0.00133034028 -0.415571898 -0.128837645
2
5
train
5
0
2
10
gradWeight
4
130
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
131
3
V 1
17
torch.CudaStorage
1152
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
10
fgradInput
4
132
3
V 1
16
torch.CudaTensor
0
1
0
2
4
padH
1
0
2
2
dH
1
1
2
2
kW
1
3
1
14
4
133
3
V 1
7
nn.View
3
134
4
2
12
numInputDims
1
3
2
4
size
4
135
3
V 1
17
torch.LongStorage
1
-1
2
5
train
5
0
2
11
numElements
1
1
2
5
train
5
0