waifu2x/models/anime_style_art/scale2.0x_model.t7
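
The dump below is a Torch7 ASCII-serialized snapshot of waifu2x's 2x upscaling network for anime-style art: an nn.Sequential stack of 3x3 nn.SpatialConvolutionMM layers (stride 1, no padding, feature planes growing 1 -> 32 -> 32 -> 64 -> 64 -> 128 -> 128 across the modules shown here), each followed by w2nn.LeakyReLU with negative_scale 0.1, with all parameters stored as torch.CudaTensor/torch.CudaStorage. A minimal loading-and-inspection sketch, assuming a working Torch7 + CUDA install and waifu2x's lib directory (which defines the custom w2nn.LeakyReLU module) on the Lua package path; this is an illustrative snippet, not the project's own loader:

-- Minimal inspection sketch. Assumptions: Torch7 with CUDA support, and
-- waifu2x's lib/ directory on package.path so the custom w2nn modules load.
require 'torch'
require 'nn'
require 'cunn'    -- parameters in this file are torch.CudaTensor / torch.CudaStorage
require 'w2nn'    -- waifu2x module that registers w2nn.LeakyReLU

-- The model is stored in Torch's ASCII serialization format, hence the 'ascii' flag.
local model = torch.load('waifu2x/models/anime_style_art/scale2.0x_model.t7', 'ascii')

print(model)        -- prints the nn.Sequential layer stack summarized above
model:evaluate()    -- leave training mode before running inference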

4
1
3
V 1
13
nn.Sequential
3
2
5
2
5
_type
2
16
torch.CudaTensor
2
6
output
4
3
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
4
3
V 1
16
torch.CudaTensor
0
1
0
2
7
modules
3
5
14
1
1
4
6
3
V 1
23
nn.SpatialConvolutionMM
3
7
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
1
2
6
output
4
8
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
4
2
2
kW
1
3
2
12
nOutputPlane
1
32
2
4
padW
1
0
2
6
weight
4
9
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
10
3
V 1
17
torch.CudaStorage
288
0.0782349929 0.191984624 0.272343576 -0.00486729993 -0.0453879982 0.0374132395 -0.0620120727 -0.346025914 -0.118555695 0.125717834 -0.1091602 -0.101463407 0.36691308 0.0188022163 -0.305511773 0.0302380007 0.0524867363 -0.0761042833 0.114563905 -0.101151228 -0.0361376368 -0.144636452 0.128091171 0.00116677396 0.103651583 -0.0637200251 -0.0085421484 -0.0671499148 0.0552384555 0.083179377 -0.0628028512 -0.0888628289 0.195475727 0.0191749893 0.156807229 0.112785622 -0.00130802963 0.0339566842 -0.0794140175 0.0675331056 0.0367526934 -0.138654947 -0.00558234006 -0.0931635275 -0.076088503 0.0397767164 -0.0629430711 0.0601006933 0.267352015 0.0754927769 -0.0196926221 0.0270499866 -0.132670745 -0.249330163 -0.113319196 -0.022048384 0.247866571 -0.0731963217 -0.13218841 0.0313575305 -0.125899076 0.0491306521 0.138609543 -0.0123230591 0.0100632356 0.00602835463 0.00838788785 -0.0222988706 -0.00790065154 -0.0142950974 0.00352283055 0.00143485947 -0.00315738376 -0.0039989315 0.00116292888 -0.00379934139 -0.01008783 0.0232498869 -0.00227809348 0.0111760646 -0.014409801 0.0220551211 -0.0479491241 0.0139118973 0.342933118 0.0894612446 -0.0383298956 0.0149149159 -0.0381731763 0.00781061873 0.0298009925 0.141468599 -0.0087740887 -0.0815517008 0.190214783 -0.223194525 0.0252797268 -0.0505319685 -0.0310693476 -0.019720329 -0.0836877078 0.040958751 -0.0465813763 0.0851141289 0.0437510386 0.0698064417 -0.247310415 0.14213185 -0.159779102 -0.183992475 0.339421868 0.111111723 0.0388071947 0.0619061813 0.036992617 -0.179400265 -0.0626586303 0.0253308024 -0.190010026 -0.2581653 0.00307779643 -0.0828531533 0.0560730398 0.00435990561 -0.00817565899 0.0225553885 0.000247155695 0.000475438399 0.000638616679 -0.000187458165 0.00577628566 -0.00573291536 0.00425002584 -0.00468704337 0.00391255645 -0.0123531744 0.0071705305 -0.00362383365 0.0136781903 -0.0162168331 0.00626910338 -0.00303721242 0.00914176367 -0.0096761426 0.0351143628 -0.00559320906 0.0301436577 0.00552752009 -0.171754614 -0.106645308 0.0371221416 0.0897970647 -0.0286361519 0.0483004488 0.104739197 0.112715915 0.102504864 0.0583291836 0.0897096097 -0.119839184 -0.297308594 -0.103161246 -0.00681937998 0.0102477325 -0.0293690972 -0.12369445 -0.030740235 -0.0135767963 -0.0524245054 0.149919361 0.101438269 0.210124403 0.0829519629 -0.138744146 0.0353372209 -0.113179334 -0.0720849931 0.0232454613 0.0912134796 -0.122981787 0.0155053223 -0.0295406226 0.00426602503 -0.0377645977 0.044829309 -0.00453431811 0.0163183212 -0.0170444455 0.000639921869 -0.0110740447 -0.111448027 0.155823126 -0.00293366099 0.0784427002 -0.104752786 0.0191998817 0.0148780625 -0.052091524 -0.173841208 -0.162043259 -0.0981596708 0.119640775 0.0903737396 0.122807033 0.133991346 -0.0310506746 -0.0037080762 0.0995668843 -0.150134638 0.0377889276 0.0190417971 0.0328046679 -0.0290766619 0.0210008845 0.0127607053 -1.79732142e-05 -0.0125208478 0.0233511925 -0.00461180089 -0.00469212653 0.0758368149 -0.0665626898 -0.0113586728 -0.0879958123 0.063361004 0.0355962031 0.018056877 -0.037960615 -0.0512568206 -0.0090626888 0.0460358672 0.0049141827 -0.00737007009 -0.00588513678 0.139413789 -0.0703029633 -0.0685049742 0.0781417638 -0.0143818874 0.0823117048 -0.207786366 -0.0410233401 -0.000285623741 0.0477505736 0.00267729908 -0.123243049 -0.0856110156 -0.0343887769 -0.0630916059 0.0253452174 -0.0593176559 -0.013858187 -0.0117234765 0.0241196547 -0.0159073509 -0.0821960866 0.0601311848 -0.0142131727 0.113204062 -0.0925677568 0.00418318296 -0.149549052 0.0989877284 0.175331727 -0.21909298 -0.112977996 
-0.149240017 0.284078628 0.104611956 -0.0272846669 -0.00365117681 0.00361246499 0.00355244637 0.00169698894 -0.0119616762 0.00498537766 -0.0271517951 0.0218030196 0.00190932723 0.0180088058 -0.0512719341 0.0178224351 0.110656664 -0.0502234548 -0.0574259683 -0.141560301 0.105563119 0.0429680943
2
5
train
5
0
2
10
gradWeight
4
11
3
V 1
16
torch.CudaTensor
2
32 9
9 1
1
4
12
3
V 1
17
torch.CudaStorage
288
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
13
3
V 1
16
torch.CudaTensor
1
32
1
1
4
14
3
V 1
17
torch.CudaStorage
32
0.00450654002 0.00392684108 -0.00731999939 -0.108959325 0.0690198615 0.00303309737 -0.000211530947 0.00323210051 -0.004111588 -0.360852331 -0.0199906845 -0.00757607026 0.00245789299 0.0143602211 -0.00276709464 0.00101351005 0.00573554309 -0.0120463418 -0.0251691584 -0.00531600881 -0.000651442562 -0.00223018019 -2.56461535e-05 -0.0105679892 0.00663680863 -0.00279732863 0.00224809442 0.0807927325 -0.00410205312 0.00208568177 -0.00767741259 -0.000301330292
2
8
gradBias
4
15
3
V 1
16
torch.CudaTensor
1
32
1
1
4
16
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
2
4
17
3
V 1
14
w2nn.LeakyReLU
3
18
6
2
5
train
5
0
2
6
output
4
19
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
20
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
21
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
3
4
22
3
V 1
23
nn.SpatialConvolutionMM
3
23
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
32
2
6
output
4
24
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
25
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
32
2
4
padW
1
0
2
6
weight
4
26
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
27
3
V 1
17
torch.CudaStorage
9216
0.17692025 0.145680174 0.115932211 0.0711811408 0.0577021465 0.0333789252 -0.000539663655 0.093978934 0.10091424 0.19068253 0.201463342 -0.120314538 0.059622556 0.109437324 -0.031027887 0.16280295 0.0750259534 0.0426276252 -0.130158678 0.0624070503 0.205608428 -0.255197763 -0.0755791068 -0.0188089516 0.100074872 -0.0385932736 0.0921711698 0.0467759855 0.0559202135 0.18207629 -0.0505997501 -0.0766776949 0.035590779 -0.0916610509 -0.0218578596 -0.0390413329 -0.0292372964 -0.188798398 -0.200374931 0.0236990992 0.0234539509 -0.0176525284 0.268782765 0.0506248996 -0.0484535098 -0.237314343 -0.404485971 -0.167209104 -0.123935632 -0.13205643 -0.0168351009 0.0394856595 -0.222908065 0.0746523663 -0.023847498 0.0769522712 0.0258860551 -0.187375456 -0.130509481 -0.207221076 0.0416100621 -0.00975461025 0.0450714156 -0.0676234514 -0.0466578826 0.0236215256 -0.0213730633 0.140255272 0.0538420938 -0.031979844 0.036206834 -0.0937447846 0.0733919442 0.00419218699 0.0591615513 -0.0289946757 0.00888051186 -0.114118174 0.0620652698 -0.046532169 0.0316581056 -0.46004957 -0.275235683 -0.427049994 -0.120413333 0.016865734 -0.263629079 0.0239973571 0.00108616101 0.127000138 -0.0706301928 -0.0849443227 0.110207677 0.0258761067 0.196124569 0.202709645 -0.00238237344 -0.0858836398 -0.230582833 -0.192311868 -0.113162354 -0.165904447 -0.0381715484 0.0224872995 -0.129481733 0.121716097 0.0217076335 0.0515524149 0.0188512597 0.0207464695 -0.203330815 0.230657324 0.11009109 -0.0235397257 0.0358989723 0.0861203 0.179656133 -0.234855413 -0.0829705819 -0.0374530256 -0.150588542 -0.102568045 -0.138813511 -0.038059108 0.0784603059 -0.0517193899 -0.0924748927 -0.0490476973 -0.0191137698 -0.0941876024 -0.0576180816 0.108009048 0.0315551348 0.173880816 -0.0301501546 0.00456433324 -0.0770385638 0.0203490723 -0.0264071375 0.0466267988 -0.00373972999 -0.0742220357 0.0779028535 0.0286288448 -0.196093842 -0.0740215853 -0.0139910728 0.0221323818 0.17310974 0.0789686739 -0.068216145 -0.000918406236 -0.0819882378 -0.0519670211 -0.160288751 -0.12848638 -0.0601982847 0.103326738 -0.0877602845 -0.110660963 -0.176879466 -0.0747964457 -0.182275593 -0.174205408 -0.0625339672 -0.0910546258 0.00360320741 -0.104046747 -0.165891066 -0.0341899097 -0.0801013559 -0.354732066 -0.341820598 0.0840742663 0.0248743072 0.263511658 0.264505357 0.0348288231 -0.167258069 -0.439319044 -0.0966207758 0.00405263808 0.128416106 -0.162158087 -0.0263973847 -0.0535936803 0.0550627485 -0.0359481573 0.0972725153 -0.34793368 0.198230624 -0.0153336972 0.0459726416 -0.148116753 -0.130618766 0.0949504599 0.0105409594 0.0143968761 -0.0404790491 -0.154047728 0.122758329 -0.190672427 -0.109755576 0.125397727 -0.037061628 0.0909382328 -0.0563246533 -0.225812554 0.0378293507 0.0258463193 -0.0380528606 0.0267955698 -0.107615113 0.169942245 -0.0775794014 0.137797967 -0.0195994861 -0.105416603 -0.177488953 -0.0439775661 0.0925285891 -0.0407380871 -0.00373737491 0.0330966264 0.0515263528 -0.0436418541 0.05022645 -0.136822939 -0.116171412 0.0524650104 -0.0866936073 0.162297279 0.0886679441 -0.0610582456 0.136220753 -0.00576786976 -0.122761197 -0.0358993039 0.00143279787 0.0532809608 -0.203478888 -0.0830361471 0.114576384 0.0715700015 -0.144206241 0.0386677384 -0.0661821365 0.0830057338 -0.0875783265 0.00927096233 0.163505003 0.0834169984 -0.0384890474 0.0694369823 -0.0367874168 -0.292761654 0.00133711833 0.0229022261 -0.094093658 0.0537424833 0.170316517 -0.0140808932 -0.0597846173 -0.487458855 0.206680015 0.0305006877 0.000593891833 0.105436802 0.0773407742 -0.00244690967 
0.022865193 -0.0111321928 0.0257602483 -0.0463520214 0.103641234 -0.0975304171 -0.127594322 0.157465205 0.0560036115 0.141848966 0.00814647321 -0.112447999 -0.0512365364 0.224767253 -0.0678343549 -0.0603169836 -0.119074099 -0.112026632 0.0202038586 -0.732502043 -0.0336496904 0.209631979 -0.683701694 -0.258822083 0.0519417115 -0.535837889 0.0842547864 0.0719756708 0.177095175 0.0260791779 0.110164978 -0.0255374666 0.00051845843 0.00617192499 -0.144487947 0.0652243569 0.00124777691 -0.0181673728 -0.171229169 -0.0865651518 0.0843675062 -0.0681765
2
5
train
5
0
2
10
gradWeight
4
28
3
V 1
16
torch.CudaTensor
2
32 288
288 1
1
4
29
3
V 1
17
torch.CudaStorage
9216
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
30
3
V 1
16
torch.CudaTensor
1
32
1
1
4
31
3
V 1
17
torch.CudaStorage
32
-0.0624344498 -0.0102353403 -0.0279447976 0.0158191007 -0.0239382237 0.0194311235 -0.00712656649 0.000718905649 0.0798480511 -0.00815884862 0.0114927087 -0.00263883849 0.015193508 -0.00445010047 0.00203093234 0.0236716159 -0.0426892564 -0.0473427847 -0.0147768622 -0.0040861778 0.00168631133 -0.000311422453 0.0559143014 0.0369714685 -0.0818731785 -0.00368040707 0.0250020456 -0.0400637351 -0.00106001657 -0.0173648726 0.0255460013 -0.0134020718
2
8
gradBias
4
32
3
V 1
16
torch.CudaTensor
1
32
1
1
4
33
3
V 1
17
torch.CudaStorage
32
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
4
4
34
3
V 1
14
w2nn.LeakyReLU
3
35
6
2
5
train
5
0
2
6
output
4
36
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
37
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
38
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
5
4
39
3
V 1
23
nn.SpatialConvolutionMM
3
40
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
32
2
6
output
4
41
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
42
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
64
2
4
padW
1
0
2
6
weight
4
43
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
44
3
V 1
17
torch.CudaStorage
18432
-0.280426323 -0.0266819112 -0.0453942381 -0.11757642 0.227507591 -0.12071459 0.172921896 -0.00275859796 -0.0581167713 0.287426144 -0.127759337 -0.197045088 -0.110488087 -0.0422803573 0.0828196853 0.0299450811 0.0140132485 -0.0877439454 0.00763972616 -0.0993058905 0.0528370552 0.179408699 -0.00690946076 0.0336954705 0.139852241 -0.0281101186 -0.0255348887 0.117579609 -0.0330496058 0.00822725706 -0.0690449625 0.139155045 -0.0787788257 -0.231520697 0.0649818256 0.399601221 -0.225317985 0.00330744544 0.254996836 -0.0472545698 -0.187786922 -0.0190823991 0.188999116 -0.00310380757 0.236920938 0.0838506147 -0.237744853 -0.314207673 0.106062494 0.0655056313 -0.0623506643 0.0996195152 -0.0881926119 -0.0787250996 0.281958967 -0.0671252906 -0.290665358 0.215274125 0.0753008127 -0.131556854 0.18418622 -0.16673328 -0.245258749 -0.146540716 -0.0182463247 -0.153533801 -0.0727192163 0.0271839518 0.0942605287 -0.0543799698 0.136153266 -0.0372378677 -0.0760670528 -0.00770117342 0.120565616 -0.0241651703 0.00967294443 -0.023204999 0.00951152295 -0.000584858586 -0.0614099614 -0.234330267 -0.326810837 0.051456783 -0.494465083 0.211356848 0.211277381 0.149745718 0.0097360583 0.148955375 -0.0303060208 -0.0483492687 -0.0576102734 -0.116938017 0.175089315 -0.0188922957 -0.181922421 0.155630231 0.262128651 0.0657042265 -0.189280719 0.0431570485 -0.0961159542 -0.150577694 0.205704033 0.0538990311 0.121434867 0.00568237435 0.1640241 0.357289523 0.0440077446 -0.0621866249 -0.0695781335 0.233184814 -0.120561227 -0.138611451 -0.140783712 0.0553683564 0.0610244535 -0.0567912348 -0.102154545 -0.138069794 0.0648013875 -0.0315168872 0.0507694297 0.0856465474 -0.0435352363 0.222272724 -0.0293064788 0.0247456171 0.0607346855 -0.100109287 -0.0538304038 -0.0514068231 -0.123755962 0.341881782 0.175477415 -0.251138717 0.160489976 0.143992424 -0.119950011 0.0439395979 -0.123282529 -0.0516357459 -0.214841411 0.161468431 -0.0170902275 0.253221065 -0.102662839 -0.191068366 -0.0130630601 0.025543116 -0.0236942936 0.203059092 -0.291579962 -0.526129365 -0.066565685 0.0526768118 -0.379827768 -0.152073771 0.301951706 0.0771690831 -0.00541179627 0.0211197827 0.0951809511 0.175482318 0.105452195 0.0412100889 -0.164959013 0.0790856779 -0.0796700865 -0.167526394 -0.00496594328 -0.00985649694 0.0012559843 0.0938496515 0.139635816 0.086662285 0.0585201755 -0.117090642 0.0349681452 -0.0388265662 0.0954981074 0.148865461 0.018795874 0.0325338729 0.214389414 0.0318010189 0.0848509148 0.140230581 0.108282723 -0.141893849 -0.0577450432 0.03312397 -0.0706621259 0.173276186 0.0244096164 -0.109082364 -0.0250777863 -0.129335955 0.150274932 -0.190788478 -0.0524561554 0.185256481 -0.0785132796 -0.0666727573 0.10067258 -0.0916846916 0.0705365464 -0.00709375693 0.132546008 0.251852751 -0.206924945 0.115870595 0.0426750667 -0.0386507958 0.0235488918 -0.0551731512 -0.0995060131 0.0679995939 0.0763249248 -0.0476545952 -0.0666695312 -0.0487212241 0.100637525 0.152027965 0.202982068 0.0515063256 -0.111986935 0.114400327 -0.0774517283 0.0163648557 -0.197924703 -0.126260281 -0.0462636836 -0.302929401 0.2488693 -0.016440725 -0.0915587544 -0.0203013271 -0.0839097649 0.11569833 -0.0651740134 -0.00366470055 -0.0426604412 0.00377529813 -0.130495712 -0.0766678005 0.0218284652 0.0239742398 -0.117721133 0.0761251524 0.147027925 0.0137287695 -0.363183916 0.119643271 -0.0456147194 -0.132990897 0.166313097 -0.109309055 -0.0565656945 0.211004913 -0.113150798 -0.141063437 -0.0331851393 -0.0128518008 0.0933161229 -0.100828268 -0.0661476701 -0.0396700203 0.018433854 0.0391776562 
-0.0293036606 -0.0878630131 0.18821156 0.130186543 0.0937222838 0.0990079716 0.0337285809 -0.0874640793 -0.182189211 0.0382710174 -0.0246317647 0.152966484 -0.0863225088 -0.12823838 -0.0400035493 0.0353785418 0.0949028358 0.0357724614 0.0437351279 -0.0935078263 -0.0249981582 0.270599067 -0.233117431 0.139248788 -0.0496906452 -0.0364752188 0.0865480974 0.0620020553 -0.0746926814 0.15558432 0.170122445 0.254723251 -0.0895355269 0.127010554 -0.0291496608 -0.168373898 -0.0951677188 0.229105115 -0.00750531163 0.256367952 0.370169431 0.1472
2
5
train
5
0
2
10
gradWeight
4
45
3
V 1
16
torch.CudaTensor
2
64 288
288 1
1
4
46
3
V 1
17
torch.CudaStorage
18432
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
47
3
V 1
16
torch.CudaTensor
1
64
1
1
4
48
3
V 1
17
torch.CudaStorage
64
-0.0213835388 0.0173852444 -9.82999882e-06 -0.00551453372 0.00688595045 0.00593521493 0.0109031135 -0.0103420205 0.0177964717 -0.0131849376 0.00464257319 0.0102987541 0.00801676512 -0.000858445012 -0.0160988644 0.013735557 0.0114505198 -0.016350979 -0.00348122651 0.00340004987 -0.102136739 0.00237672217 0.00277029304 0.00974576082 0.000566722651 0.000114157192 -0.0692258477 -0.00437585032 -0.013809029 0.0100124013 -0.00246305275 0.00275998376 0.00886781327 -0.00316476659 -0.00694363657 -0.0215282124 0.00349286385 -0.00367586105 -0.00582475029 0.00860975869 0.00162026135 0.00749300513 -0.0160499513 -0.012288616 0.00590238674 -0.018274188 0.00369986333 0.0031041638 -0.0101423962 0.00472946977 -0.173965454 -0.00120016665 -0.00117923447 -0.00712431595 0.0098080039 -0.00340720662 -0.0215866063 -0.0125690391 0.00869433861 0.000717177347 0.00604525022 -0.00278042443 -0.00280040619 -0.00465736398
2
8
gradBias
4
49
3
V 1
16
torch.CudaTensor
1
64
1
1
4
50
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
6
4
51
3
V 1
14
w2nn.LeakyReLU
3
52
6
2
5
train
5
0
2
6
output
4
53
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
54
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
55
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
7
4
56
3
V 1
23
nn.SpatialConvolutionMM
3
57
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
64
2
6
output
4
58
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
59
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
64
2
4
padW
1
0
2
6
weight
4
60
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
61
3
V 1
17
torch.CudaStorage
36864
0.160623267 0.00383288111 0.10791342 -0.172277838 -0.0440958813 -0.102737308 -0.0396719538 -0.0871972367 -0.149306282 -0.0728564709 -0.164615273 -0.0370954871 0.115369573 0.201252922 0.152456492 0.124233671 0.0842166767 0.153189272 -0.0560738258 0.114182487 0.0369049273 0.0399704836 -0.0920772552 0.00515455892 -0.0941950306 0.170068756 0.122847684 -0.105418324 0.127559066 0.0869153142 -0.0509330705 -0.300454348 0.0326067582 0.352639705 0.264817327 -0.224359155 -0.00125803845 -0.0413452648 -0.0592843145 0.14134118 -0.0308900401 -0.0808722749 -0.148271441 -0.104989 -0.0882605389 0.121911258 -0.220713392 -0.0920028389 0.0756040737 -0.0752059892 -0.0798280835 0.119800791 0.115866594 -0.047547657 -0.126511857 0.151784569 0.0954312757 0.0300443806 -0.114172801 -0.167511076 0.113179997 -0.209702581 0.0200466234 0.0838689879 0.130630225 -0.167595699 -0.0447258241 0.280275673 -0.061478015 0.167087734 -0.134388 -0.280786395 0.0120209148 0.214869544 -0.0493012294 0.0861577913 0.210720852 0.206310451 0.00675530918 0.261117548 0.433277905 -0.0439859368 0.00894848816 -0.148001835 0.0184069332 -0.0393072031 0.0178797208 0.111467399 0.127387881 -0.0384801775 0.134156823 -0.133318931 0.150312632 -0.092315115 -0.0410485342 0.0900264084 0.012373073 -0.115107194 0.00579926884 -0.0623210371 -0.152175918 0.0407362692 0.0769338161 -0.0117052142 -0.0452532806 0.30768165 -0.0902925804 -0.338318557 0.0232310276 -0.0157361273 -0.245407119 -0.198588341 -0.188932076 0.153529346 -0.132556081 0.00364877423 0.00346578262 0.174878046 -0.136236534 0.131260708 0.0764627755 0.11742954 -0.0776278302 -0.164609164 -0.0548568405 -0.0727169663 0.0102100931 -0.00239083939 0.204554439 -0.126368269 0.136708751 0.271136194 0.0451826155 0.0386661477 -0.312611043 -0.0596615262 0.111620255 -0.228027165 0.0561824404 0.0399542227 0.162338242 -0.0869788378 -0.303578705 0.188669786 0.0355707109 -0.135844395 0.206359193 0.026157029 0.0142437881 -0.0158725716 0.160851747 0.0782462433 0.11321371 0.114435233 -0.00607725978 0.245355606 -0.0741145685 -0.0309244599 0.162123471 -0.204785645 0.177402139 -0.206343949 -0.0632154569 -0.224924445 0.126652002 -0.156561106 -0.0411970913 -0.234980494 0.0514808446 -0.213536352 -0.155231938 0.0613888279 -0.0275306478 0.0820473284 -0.0775011927 0.0431287326 0.017558815 -0.0907930285 -0.0529724434 0.0559694581 -0.0192665923 0.116854832 -0.0120580038 0.0418510213 -0.147817835 0.104557 0.0829304755 0.00110634021 0.0309621822 0.0696804598 0.0563182198 0.108969979 0.0294680428 -0.209940031 0.00284531014 -0.0117375031 -0.342880756 -0.051791355 0.135668397 -0.0501828417 -0.160485148 -0.034716852 0.0287096575 -0.027721487 -0.0130760437 -0.125569746 -0.396141201 0.0251193512 -0.0743113831 -0.0734172761 0.153773218 0.0209942069 -0.0974915475 0.106174417 -0.14865537 -0.00188433926 0.0849080607 -0.0676427484 -0.0587851256 -0.129017279 -0.0923069417 -0.0593030378 0.0063609262 0.0270494018 -0.013525825 0.0612766258 -0.0194568243 -0.0593842901 0.0821317211 0.216393128 -0.0568384007 -0.14036791 -0.105654612 0.0342408121 0.00747930352 -0.041220624 -0.0542254038 -0.011951548 0.0213491153 -0.0515091643 0.027412558 0.294222564 -0.113344498 -0.0467269905 0.016853977 -0.0237963479 -0.0943631008 -0.0316376649 0.019163385 -0.112079449 -0.0271476731 -0.131636366 -0.0210936125 -0.0426016077 -0.0871308595 -0.106567435 0.130712032 -0.260009617 0.0486289226 0.0818467513 -0.393620014 0.00917407311 0.0771937519 0.126269862 0.0469219536 -0.147979483 -0.193994999 0.213210747 0.155293778 0.146289825 -0.00953235943 -0.100312017 -0.00951288547 
-0.266278654 0.331971109 -0.00980166532 -0.108918756 0.0395354591 0.0178623293 0.0205238182 -0.0495584682 0.0481315069 0.055415757 0.0277642366 -0.0215066485 0.0418693051 -0.0848594755 0.0458591841 0.0232887324 0.126706928 -0.0106646521 -0.0472426862 -0.142666504 0.17841205 -0.179658398 0.0722364262 0.224890321 -0.0799911916 0.087548174 -0.0130994646 0.0455728099 -0.0250637289 -0.0027831262 -0.0987124518 -0.0297516454 -0.131735653 -0.0288732424 -0.0710102171 -0.0022556989 -0.138684854 -0.00114520523 -0.148577422 0.104148403 -0.12847858
2
5
train
5
0
2
10
gradWeight
4
62
3
V 1
16
torch.CudaTensor
2
64 576
576 1
1
4
63
3
V 1
17
torch.CudaStorage
36864
-2.13663982e-11 6.75565272e-18 -2.21793618e-25 -3.88851386e-16 6.1228846e-16 -4.12559865e-14 -1.29790303e-16 7.20776731e-17 -6.8990571e-15 4.09130893e-18 6.96059321e-10 8.07769476e-16 2.16200132e-13 1.76937824e-08 6.47231533e-16 4.7877069e-10 -3.18322479e-09 -2.26873714e-10 -1.24142531e-07 -9.21397819e-11 -1.28427338e-18 -3.61740152e-11 1.53609479e-21 9.14119114e-10 4.38822503e-12 -1.12930583e-14 -4.33162566e-13 -8.96221292e-11 2.9640318e-10 -5.27859423e-10 7.26697258e-10 5.08028847e-07 -2.39768726e-11 -3.67707045e-11 1.22832762e-22 -3.10557949e-13 -1.80472061e-11 5.4819559e-14 3.11208145e-13 2.81085953e-11 -6.44287488e-12 -7.58087169e-12 7.7300244e-10 8.67418117e-12 1.08806075e-14 -4.07879099e-07 -3.26297623e-14 -1.17331511e-21 -1.0583344e-08 -1.91594607e-09 3.43109052e-09 -6.59305697e-17 -1.38562755e-11 2.69827681e-18 -2.7185298e-15 4.97736505e-11 -1.54764292e-11 9.67676357e-08 3.06478543e-14 -3.4867706e-18 -4.84743126e-08 -2.06518996e-10 -1.02259742e-10 -1.065205e-12 1.08692647e-20 -4.56811217e-19 -7.42919042e-07 -1.05723319e-09 -2.74078286e-12 -7.79251524e-11 -3.53398093e-07 2.05838254e-12 2.19814084e-10 -1.40583847e-11 2.53053837e-14 -3.92347399e-09 -1.2113018e-15 -2.07562679e-14 5.03597747e-14 3.30052664e-19 -1.77097746e-11 -2.09022943e-07 5.64232994e-10 -6.01273892e-11 -2.10421227e-08 6.21390824e-14 3.63687075e-12 -3.46613526e-13 -1.20263076e-14 1.50383067e-10 5.6075784e-08 1.71923745e-07 7.84332066e-09 7.30156461e-15 -2.36945885e-09 1.46184037e-19 -4.6874804e-10 3.60163588e-09 1.07804373e-11 -3.47550304e-05 -2.64665736e-15 -6.57272067e-12 -5.57399057e-11 2.00371941e-09 -6.05039512e-24 3.29518574e-12 -2.10656168e-16 1.23353699e-16 6.35256203e-09 6.61605406e-12 -1.06407491e-13 1.36226659e-18 2.34835311e-11 -7.40425943e-09 2.51399142e-14 -5.6447389e-09 -1.01413344e-09 9.35607711e-13 1.70241447e-12 -6.69478172e-14 -4.28094223e-17 -6.2763732e-08 -1.61004212e-13 5.33267944e-11 9.24674932e-18 3.11772697e-09 -1.02690908e-26 1.15428223e-09 6.13593354e-09 1.01360723e-14 -2.79576763e-13 1.58749369e-15 2.11071239e-07 -3.90687402e-23 -1.70076796e-14 9.60660544e-12 2.46312033e-11 -7.57125385e-09 1.61138498e-11 -2.79743062e-10 -1.95835258e-11 -1.44611721e-19 4.48668394e-11 -3.20811545e-17 2.86836721e-10 2.17606246e-11 -1.42154075e-08 1.94826688e-09 -1.18029164e-09 8.67990263e-17 -1.10883374e-08 -1.53209991e-13 4.27551952e-11 -1.00565653e-10 -4.27147293e-12 -2.24072948e-11 9.04793018e-10 1.67024461e-09 4.23086446e-16 -3.18939821e-14 -1.2381518e-08 1.14171801e-12 1.26612023e-16 -1.29260685e-10 2.9005482e-11 -2.90040041e-11 -7.49884727e-16 4.12964454e-11 5.60686588e-13 1.139558e-09 -1.12640821e-12 -5.95677251e-25 7.06730495e-15 3.45266974e-17 -1.33185214e-08 -9.22323063e-14 -1.53338903e-13 -9.4080356e-14 1.37578902e-12 -4.62750602e-11 -7.87056678e-08 -2.79277508e-08 -1.77248559e-12 4.45680368e-11 -5.22674716e-17 -2.04986561e-09 -2.0931199e-09 -6.75505762e-10 2.14886754e-11 -1.19971566e-09 -1.41526924e-14 -5.70816096e-15 6.6438155e-10 -7.85552417e-18 -1.64119722e-13 -1.90533861e-14 4.46328461e-17 1.861932e-10 1.472065e-14 1.01061506e-08 -9.52023703e-12 1.16092043e-08 6.68024842e-17 -6.6832348e-15 -2.66819856e-12 -4.37700098e-09 -2.23559913e-13 -1.40970608e-16 -1.2866052e-16 -9.60751895e-11 2.94305984e-08 -1.03161941e-11 -1.38564132e-10 -2.54800575e-13 -2.11047779e-14 -6.124804e-10 -1.0954718e-11 2.83798983e-12 -6.17455255e-15 9.88049533e-15 4.38854614e-14 -1.222204e-14 2.79905309e-12 2.11863756e-11 -2.06402387e-12 -3.70835623e-23 -5.52808972e-17 1.61073183e-10 -8.03357048e-10 6.07537687e-10 
-3.15331149e-13 9.14101678e-17 2.81594031e-27 2.73937427e-12 1.82394305e-24 -3.87105539e-12 -2.15881135e-09 5.76720307e-26 -4.19658001e-14 -1.26371336e-14 -1.09103738e-07 -6.84078102e-16 3.30787079e-17 -8.5627315e-17 2.53166402e-17 -5.2453267e-16 8.69358474e-10 4.46270066e-15 2.4643499e-11 8.26881207e-13 -8.66213906e-11 1.29131537e-20 4.49769319e-11 3.54048665e-11 1.04105702e-13 9.22684507e-08 -1.58036229e-09 2.7553128e-13 -1.19561777e-10 3.19145795e-12 -1.66362008e-10 -8.61209541e-12 6.49155642e-12 1.31079483e-20 -2.26235519e-17 6.7332163e-16
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
64
3
V 1
16
torch.CudaTensor
1
64
1
1
4
65
3
V 1
17
torch.CudaStorage
64
-0.00465938961 -0.0186170209 -0.0384001993 -0.0242348853 -0.0166193303 -0.0282201879 -0.0202509034 -0.0221538022 -0.0240941439 -0.0159848481 -0.0092710983 -0.0235891081 -0.0205170531 -0.0154840788 -0.000616199453 -0.000122559301 -0.0224535 -0.0533247441 -0.0143328877 -0.0363942906 0.0012004443 -0.0170043837 -0.0191572998 -0.0180550124 -0.00832421612 -0.0334892422 -0.0279304329 -0.0192812476 -0.0205119811 -0.019648876 -0.0119138565 -0.0412475951 -0.0371211059 -0.0156169999 -0.0739082322 -0.031776391 -0.0174047444 -0.0253786948 -0.0379793718 -0.0199096762 -0.0324433334 -0.0270304531 -0.024683658 -0.0241938327 -0.0201458875 -0.0212521069 -0.0344552547 0.003032665 -0.0179405566 -0.0297549609 -0.0299403984 -0.0812256411 -0.00151013525 -0.0118636377 -0.0228352621 -0.169153884 -0.0353651606 0.00011363357 -0.0502327271 -0.0231124796 -0.026708737 -0.0195183512 -0.0280230548 -0.0242583565
2
8
gradBias
4
66
3
V 1
16
torch.CudaTensor
1
64
1
1
4
67
3
V 1
17
torch.CudaStorage
64
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
8
4
68
3
V 1
14
w2nn.LeakyReLU
3
69
6
2
5
train
5
0
2
6
output
4
70
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
71
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
72
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
9
4
73
3
V 1
23
nn.SpatialConvolutionMM
3
74
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
64
2
6
output
4
75
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
76
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
128
2
4
padW
1
0
2
6
weight
4
77
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
78
3
V 1
17
torch.CudaStorage
73728
-0.118653111 -0.071069397 -0.0799584463 -0.0412229896 0.0378869027 -0.0899302736 0.0277772509 0.311533839 -0.141426265 -0.0472674817 -0.182790488 0.0575233251 0.00124118547 0.0455180779 0.0930368453 0.0351994075 0.125507057 -0.380816698 -0.0545882918 0.0585658029 0.00241151336 -0.0783686563 -0.143666103 -0.0935167223 0.134780556 0.13805972 0.0288292356 0.090325512 -0.135856763 -0.429120839 -0.267137945 0.097451359 0.088623777 -0.107788488 0.12277282 0.274293661 0.257907927 0.148142532 0.0906255618 0.0485008024 -0.0784008205 -0.0376295783 0.0603701547 0.154569 -0.0172413252 -0.201370746 0.0207360238 0.0115512563 0.00713348622 -0.0139276721 -0.0768274814 -0.13873978 -0.14569281 -0.202837691 -0.174473256 -0.111772776 0.0227496549 -0.123725668 -0.0622981898 -0.581142128 -0.140650511 0.173400491 0.30631566 0.0680754632 0.0257732868 -0.0421105511 -0.107061788 0.00430816691 -0.117209129 -0.171894819 0.0993765891 0.0145774297 0.239225507 0.00682291389 -0.151837334 0.08246658 -0.0405548811 0.221299142 -0.0236552879 -0.237271622 0.0623420253 -0.658962429 -0.232654005 -0.129805639 -0.0808458552 -0.098947674 -0.0840001926 0.151593238 -0.153830171 -0.359440207 -0.133809343 -0.0580529831 0.242509186 0.00933659356 -0.0763508081 -0.0247192439 0.0677743852 -0.0606296696 -0.299890637 -0.09823405 -0.0819220394 -0.195862129 0.035632059 0.174181432 -0.0649754331 0.102651827 -0.1168871 0.0787134841 0.0263272114 -0.0337550491 0.0307958797 -0.0135853738 -0.0329527929 0.00263664871 0.0958946645 0.086325027 -0.0838624239 0.0219817664 0.15242821 0.0845013186 -0.00384482718 -0.245044872 0.134603098 -0.157630131 -0.200943038 0.114032045 -0.0306818448 -0.237157002 -0.0851958916 -0.149437606 0.285284996 0.247106731 0.143174693 0.0844245031 -0.0163425058 0.0417556614 -0.108268276 -0.0460770428 0.0417641476 -0.0809280723 -0.0190974381 0.0688677132 -0.00717822509 -0.118351817 0.0107778497 -0.0986421332 0.0104418695 -0.0542089902 0.142328769 0.138347402 -0.343988568 0.124455534 -0.0412586778 0.00216036499 0.0317204259 0.0750017092 0.143228367 0.101579651 0.129137099 -0.00247913226 -0.132147536 0.248068482 0.0221564714 0.101767831 0.0782406926 0.0350913741 -0.155677795 0.0257190354 -0.104714908 -0.124200098 -0.15965651 0.0238600299 -0.102583304 -0.116472289 0.0600451566 -0.213625774 -0.0257293191 0.158961236 0.0225786865 0.0304326955 -0.0131583447 -0.0812925771 0.0268867556 -0.00930300541 -0.0524367839 -0.180379078 -0.00120359042 -0.105214596 -0.113788217 0.121171921 -0.102121808 -0.0719182715 -0.0623658895 -0.0327113196 -0.00271654967 -0.179211915 -0.0223313533 -0.112781838 -0.149381012 0.0105097368 -0.0420428701 0.193187013 -0.0726958066 -0.102455303 0.0846149251 -0.21163024 0.0665839538 -0.253226042 -0.0561383925 0.0282096472 -0.158885688 0.00226919143 0.110886864 0.146104366 0.167855933 0.194983512 -0.220263332 -0.256390393 -0.158988103 -0.175460264 -0.0454951972 -0.0258829705 0.0535657369 -0.048241362 -0.040568836 0.0507684685 0.0645691007 0.0147350468 -0.0481627695 0.0409910791 -0.0970233157 0.0559610352 -0.0330822282 -0.0601160824 0.0203210041 -0.263015449 0.0229461528 0.0468567796 -0.241992593 -0.126427591 0.114304692 -0.137898341 0.0114809917 -0.228031859 -0.229149014 -0.0422204882 0.0605425313 -0.0214564279 0.0618217029 -0.113131389 -0.205726877 0.0983068049 0.0145360865 -0.035249833 0.119031742 0.00886798929 -0.00573306903 0.103533261 0.0511868969 0.155117974 0.0923228562 0.0753260478 -0.0471517742 -0.0768074095 0.119951226 -0.0293587744 0.291566551 -0.250726104 -0.359959662 -0.145971358 -0.201682106 0.158170909 
0.257610142 -0.0468467139 -0.0563361123 0.107348979 -0.114974923 -0.281412274 -0.111678369 -0.109190181 -0.232184991 -0.0943404883 -0.00317172427 -0.0595260412 0.0517807193 -0.0343020335 -0.324006289 0.0590009764 -0.002243903 0.0788223296 -0.0290128049 0.0290106516 0.0396635868 -0.0806711838 0.0390350968 0.263881117 -0.227525368 0.0287747458 -0.0934681818 0.0887030661 0.0272958521 0.0623776205 0.0727942437 0.165083736 0.16126065 0.0378470607 0.164331019 0.167027354 0.151224136 0.371905565 0.0439097956 0.202683777 0.275458694 0.258781523
2
5
train
5
0
2
10
gradWeight
4
79
3
V 1
16
torch.CudaTensor
2
128 576
576 1
1
4
80
3
V 1
17
torch.CudaStorage
73728
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
81
3
V 1
16
torch.CudaTensor
1
128
1
1
4
82
3
V 1
17
torch.CudaStorage
128
-0.0371742994 -0.0372894704 -0.00865644682 -0.0189245306 -0.0082264943 -0.00829789508 -0.0120770615 -0.021790646 -0.037724413 -0.126085922 -0.0226399656 -0.0123834834 -0.0169511251 0.00656261947 -0.0214434341 -0.0166387334 -0.0167016555 -0.0170209371 -0.0276082493 -0.0243802629 -0.0148967979 -0.00932010263 -0.022378996 -0.0238606464 -0.02077608 -0.0274037104 -0.0108346501 -0.000484207005 -0.0119011737 -0.0186624695 -0.0115036005 -0.0193161145 -0.0138589488 0.0117075434 -0.013819389 -0.00569867902 -0.0175465364 -0.0161856171 0.0129322242 -0.0250044391 0.00324487546 -0.00544656394 -0.0139395362 -0.00779447937 0.00537628215 -0.0185468849 -0.0293169096 -0.0345238671 -0.0251363553 -0.025927145 -0.00254198932 -0.0142645137 -0.0187210999 -0.0307660531 -0.0118931243 -0.00323167024 -0.0146891288 -0.00879466999 -0.0352980383 -0.0264428444 -0.00875399075 -0.00860267133 -0.0364969745 -0.0391578414 -0.0307189226 -0.0310745705 -0.0211582873 -0.0101688663 -0.0270451475 -0.0543646067 -0.0332784206 -0.0265433155 -0.0240726415 -0.0344454162 -0.0161675885 -0.0148987407 -0.013119638 -0.0430686772 -0.033651419 0.000634694239 -0.0361840203 -0.0202967487 -0.00868562516 -0.00939766038 -0.00579649117 -0.0180322677 -0.0312675349 -0.00380220311 -0.0325238146 -0.0208871625 -0.0011228302 -0.0285480469 -0.0226726718 -0.0542440638 -0.015587993 -0.0175938103 -0.0136417346 -0.0236769207 -0.0191540699 -0.0272555538 -0.0203929711 -0.0303859413 -0.0139227817 -0.0163278598 -0.0247495454 -0.00563903479 -0.0160091519 -0.0220374782 -0.0166779421 -0.0171985738 -0.000207339603 -0.00740047498 -0.0167152286 -0.0327773355 -0.00442904606 -0.0126238707 -0.0625743344 -0.000388777291 -0.00569061982 -0.000424391154 -0.0256587323 -0.0277319085 -0.0250573549 0.00288207945 -0.0175939556 -0.0295562427 -0.00981277414 -0.00908450224
2
8
gradBias
4
83
3
V 1
16
torch.CudaTensor
1
128
1
1
4
84
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
10
4
85
3
V 1
14
w2nn.LeakyReLU
3
86
6
2
5
train
5
0
2
6
output
4
87
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
88
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
89
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
11
4
90
3
V 1
23
nn.SpatialConvolutionMM
3
91
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
128
2
6
output
4
92
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
93
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
128
2
4
padW
1
0
2
6
weight
4
94
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
95
3
V 1
17
torch.CudaStorage
147456
-0.00141719601 -0.0127637153 0.000439397001 0.176320523 0.0879600272 -0.00518795056 -0.0915635377 0.0415863134 -0.0330117904 -0.033770293 0.0165932123 0.12826702 0.0103427889 -0.107981279 0.0187935065 0.065221414 -0.0484935828 -0.081633158 -0.0184458178 -0.0422593392 -0.0541605502 -0.0235744454 0.0659745932 -0.00904318318 0.0440383814 -0.00277572451 0.0380225144 -0.0179713294 0.100838281 0.0630976781 -0.0740202516 -0.0530418083 0.0133657809 0.0303738341 -0.0066870572 -0.0166045316 -0.0950587392 0.017196916 -0.00886645727 -0.0220833123 0.0151629457 0.00178687612 0.0355332233 -0.0421773754 0.0489639081 0.119060613 -0.0457851104 -0.18767941 -0.0307100601 -0.0240932703 -0.0711900219 -0.0301904362 -0.0562978946 0.00363650895 -0.066147998 -0.0923772156 0.0699603036 -0.0461396985 0.0221241489 0.0930453762 -0.0790743679 0.0189100746 0.00480294647 -0.0326880552 0.0532299243 -0.0477855653 0.0199996289 0.0148623306 -0.0960559398 -0.125830874 -0.00521226414 -0.0846777111 -0.0296965297 -0.094923377 -0.0169389732 0.247021765 -0.106277585 -0.0142743392 -0.0254885964 -0.0807926133 -0.0297115874 -0.0832556412 -0.0631511509 -0.00511156349 0.0865323693 0.0195649527 -0.0361526199 0.00615254696 0.00897474959 0.0304687042 -0.113349319 0.055469349 0.13329114 -0.0293011069 -0.00930645131 0.0562062189 -0.0234219879 0.0107777631 -0.0347306728 -0.021456657 0.0681457743 -0.0244895164 -0.10195642 -0.0256642364 -0.0108876564 -0.105826356 -0.0556525737 -0.0743071511 -0.230993614 -0.0753732026 0.0134445615 -0.0842744187 -0.0416246019 0.0096720336 -0.0787371993 0.0212108474 -0.000278594118 0.0105952965 0.00802254397 -0.0073771677 0.0279029068 0.0463636816 0.0233199336 0.0742509961 0.038773749 0.102761529 0.0265596937 0.0310745351 0.0012000862 0.0557810031 0.0535574369 -0.00114097039 -0.0294602867 0.00272169034 -0.00359896477 0.00868219044 -0.0213374663 -0.0349804424 -0.0130174523 -0.0204333048 0.0551611967 0.00384959159 -0.0482495576 0.0363148823 -0.072509177 0.0683193803 0.0580938384 -0.105699971 0.223347843 0.184123129 0.0407713093 -0.0139115965 0.0563375428 0.0413562022 0.0157898311 -0.0131857116 0.0227186941 0.0617222637 0.00380761223 -0.0521741807 -0.0116539123 0.0160321146 0.0192640275 -0.0698483214 -0.0247527566 0.0161540043 0.0302890614 -0.012225166 -0.00398274092 -0.0196479056 0.0855574682 -0.0231048279 0.139602736 -0.122375511 -0.00538222305 -0.0227475166 0.120035797 0.0274740476 0.0494013913 0.0147250108 -0.150062606 -0.0641366839 0.0145679442 -0.133669361 -0.035697259 0.0603736714 -0.151364103 -0.027786389 -0.0698701888 -0.0593733899 0.0902972072 0.0260075703 0.00151626172 -0.0194156393 0.128368646 0.107346527 -0.024581356 0.0225936994 -0.0133236423 -0.0197616816 0.036005225 0.162658527 -0.0383477509 0.000385276391 -0.0782382935 0.0471234955 0.0812439546 0.0458342656 -0.0971779451 -0.113755189 0.0372963063 0.133373171 -0.0626935661 0.0655874237 0.121113256 0.0488925725 -0.0651348755 -0.0818062052 -0.0105469329 -0.0621500537 -0.0508781858 0.083126463 0.0385767743 -0.115388013 -0.0213059857 -0.0698889792 -0.0927255303 0.0018086785 0.0739485994 0.0319962017 0.0446851775 -0.0915398821 0.0421938486 0.0386442915 0.103333093 0.053278666 -0.114766702 0.0424562804 0.0595650747 0.0163874477 -0.0325019807 -0.0466500521 0.0637105256 0.0092612952 0.0943523124 -0.120070763 -0.00318081817 0.0428945608 0.108895488 0.121996552 0.0858399794 -0.00990780164 -0.00765311019 -0.0723156109 -0.0186570864 -0.0757500827 -0.0512658358 -0.122650631 0.12952055 0.0161259882 0.0190712623 -0.0532140769 0.0368575342 -0.05425255 -0.102142364 
-0.02097564 0.0624551326 0.00209467742 0.0459893197 0.0127385035 0.0126973307 0.0296442565 -0.00815721229 -0.0441553816 0.0200783443 -0.0107467026 0.0256109759 -0.00960849784 0.0121189496 -0.0271294471 0.115654722 -0.0505675822 -0.0979069844 0.0516637862 0.0227899235 0.00980306976 -0.0880140439 0.0122808972 -0.132510141 0.00776021555 0.0119624687 0.00731891533 -0.0579788722 -0.00657043606 0.0262473505 0.0457689092 0.109647579 -0.0281704813 -0.128830701 -0.107747518 0.0462525934 -0.0508349799 -0.0545123145 0.0153880138 -0.0222647469 -0.02
2
5
train
5
0
2
10
gradWeight
4
96
3
V 1
16
torch.CudaTensor
2
128 1152
1152 1
1
4
97
3
V 1
17
torch.CudaStorage
147456
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
98
3
V 1
16
torch.CudaTensor
1
128
1
1
4
99
3
V 1
17
torch.CudaStorage
128
-0.0996393859 0.0243529491 0.00508114463 0.012061188 -0.00504941307 0.00083173142 -0.106124103 0.0128610497 -0.123735428 -0.0270217452 0.00541166356 -0.00359430816 -0.0923234671 0.0231809244 -0.101851285 -0.0300156381 0.0167918298 0.0257864185 0.0193059463 -0.00361180678 -0.103396051 -0.0991314501 0.035248667 0.0182231031 -0.0113932993 -0.0149814673 -0.107813872 0.00187553978 -0.100595355 -0.0135224471 -0.0136896567 -0.109696776 -0.0605036281 0.00429948047 -0.119173624 -0.100151286 -0.147179231 -0.0116968825 -0.118012309 -0.0164079163 -0.208386064 -0.0211942121 0.0341914296 -0.117905788 -0.0134092877 0.0121155689 -0.0955622941 -0.0302483682 0.0134860305 -0.00708840601 -0.0104591036 0.0196028668 0.0126896091 0.00273986487 -0.0145312883 0.00154092349 -0.0180318952 -0.117588647 0.0228904355 -0.121652298 -0.0951717943 -0.00164854329 0.00759855658 -0.118692793 0.0858375654 -0.0968242958 -0.0508157387 -0.170675799 -0.00812947098 0.0176920276 -0.0155283893 -0.0033827736 -0.12187326 -0.0335042626 -0.0390943959 0.0126515981 -0.0127288988 -0.0871722326 -0.0737278312 -0.00928001571 -0.00589995831 -0.120639369 0.0458348431 0.0141300969 -0.0336264446 0.0200952869 0.00102664938 -0.0970095322 -0.177454621 -0.000830243691 0.0212941039 -0.0219938681 -0.0137722399 0.0167656969 -0.0291415937 -0.115791552 -0.12605454 -0.0883303806 -0.0385115184 0.0171197448 -0.00400050124 -0.0192778409 0.0253002252 0.0114562986 0.00569510879 -0.0844145715 -0.124451973 -0.00443798816 -0.0167711601 0.0137000866 0.0296384189 -0.0279952474 -0.122963369 -0.0130076483 -0.00406019203 -0.0880786106 0.0111882379 0.0171606783 0.00343526341 -0.0120719783 0.0076157921 -0.213954628 -0.150507227 0.00499701919 -0.00747920526 -0.00258418359 -0.0200290009 -0.0582148954
2
8
gradBias
4
100
3
V 1
16
torch.CudaTensor
1
128
1
1
4
101
3
V 1
17
torch.CudaStorage
128
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
4
padH
1
0
1
12
4
102
3
V 1
14
w2nn.LeakyReLU
3
103
6
2
5
train
5
0
2
6
output
4
104
3
V 1
16
torch.CudaTensor
0
1
0
2
9
gradInput
4
105
3
V 1
16
torch.CudaTensor
0
1
0
2
5
_type
2
16
torch.CudaTensor
2
8
negative
4
106
3
V 1
16
torch.CudaTensor
0
1
0
2
14
negative_scale
1
0.10000000000000001
1
13
4
107
3
V 1
23
nn.SpatialConvolutionMM
3
108
16
2
2
dH
1
1
2
2
dW
1
1
2
11
nInputPlane
1
128
2
6
output
4
109
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kH
1
3
2
9
gradInput
4
110
3
V 1
16
torch.CudaTensor
0
1
0
2
2
kW
1
3
2
12
nOutputPlane
1
1
2
4
padW
1
0
2
6
weight
4
111
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
112
3
V 1
17
torch.CudaStorage
1152
-0.80660212 -0.13665314 -0.314549804 0.0714655817 -0.090988718 0.782145083 0.284852237 0.580417693 -0.602311075 -0.5544222 0.74342078 -0.0617433898 0.522846997 -0.106599584 -0.710478187 -0.178975105 -0.1362672 0.552709401 0.247187316 0.235754505 0.0323451795 -0.0900526196 -0.487253845 0.163703337 -0.307918668 0.337688744 -0.173799202 -0.0627170354 0.113458455 0.20820947 -0.29087916 -0.0534902252 -0.806493819 -0.124149106 0.546840489 0.462312579 -0.0733138844 -0.098850131 0.0489163324 0.178424954 -0.0908752605 -0.141160503 -0.0293845031 0.33496508 -0.101105899 -0.251679271 0.252402335 -0.102631286 -0.054050006 0.114330478 -0.100703694 0.18680492 -0.122614115 0.0709194764 0.123661041 0.34027496 -0.296229422 -0.0872428343 -0.287883878 -0.238914385 0.11017289 0.383867621 0.0597532801 0.385881484 -0.518052995 -0.315303445 -0.0476253293 0.237283573 0.192193702 0.307055384 0.127158865 0.492506891 -0.0872098953 0.490095824 -0.241322368 0.134307295 -0.160556257 -0.890225828 -0.208896682 -0.504583716 -0.0390542932 0.0353048332 0.153034195 0.0102606313 -0.0409306437 -0.141035438 -0.0390056483 -0.0721253306 0.114798062 -0.0152723435 -0.306442618 0.089165099 0.265537798 0.487098575 0.0750181898 -0.437934607 -0.175897613 -0.150605708 0.204330295 -0.349838912 0.0978336334 -0.130835906 -0.125655651 0.061970856 0.309255064 0.136623785 0.0617852472 -0.0517537594 -0.342297941 0.678776562 0.0411888212 -0.0470124334 -0.337120473 0.743343353 -0.216494173 -0.324434042 -0.351816297 -0.178270146 0.583370745 -0.371351212 -0.123741508 0.0776412636 -0.129999936 -0.0123965079 -0.0578914247 0.218834251 -0.73664391 0.12266583 -0.936042547 0.133549288 0.0852005109 0.167041928 0.102911264 0.310970813 0.303121656 0.0698498115 -0.0183155909 -0.0237366389 0.102262035 0.185097069 0.0065605226 -0.263454169 -0.00957732182 -0.0480718687 0.657061338 -0.0265807882 -0.397736847 -0.981202543 0.627294958 -0.0592594557 -0.0658690706 0.0718916506 0.183349818 -0.108927689 0.758396506 -0.127527356 0.501594365 0.093924962 -0.383778065 -0.191440374 -0.202213913 -0.30514425 0.00361463125 0.0262554679 0.0529492944 0.00125394238 -0.033307299 0.148249239 -0.0476456136 -0.0913797542 -0.0580990091 0.605627954 -0.505911469 0.0555939712 0.225160301 -0.402765602 0.336846858 -0.435117811 -0.0110924216 0.154454559 -0.507102191 0.349092454 -0.199366778 -0.160989329 -0.0900763571 -0.524003267 0.113943972 0.375012517 0.239860579 0.450263113 -0.431277305 0.308791906 0.262494981 0.0959624723 -0.381540596 -0.481217355 0.199460492 -0.578858912 -0.450632066 0.0258793347 0.0832435191 0.339188665 -0.180083647 0.0126290172 -0.212293431 0.241872668 0.121154949 0.127272174 -0.00732452562 -0.0881032422 -0.453591049 -0.131044328 0.143130109 0.415115386 0.182097897 -0.103474259 -0.167930618 -0.0364870802 0.0869158655 0.276237935 -0.0945868045 -0.0445415452 -0.0420151539 0.011505683 -0.00239238143 -0.101807915 -0.110124633 0.06639456 0.0213878453 0.0745547861 0.0900242925 -0.00213472941 -0.012307954 -0.0408998877 -0.0988051668 0.143828884 -0.401758015 0.394369781 -0.294139296 0.253941357 0.037222743 -0.401935309 0.245013237 0.173624605 -0.00791228283 -0.0175479315 0.038321238 -0.0768742189 -0.0657942817 0.00588405039 -0.0472311154 -0.00439034821 0.0100687975 -0.0140019581 0.0897070393 0.0484989695 -0.31317243 -0.16041021 0.114405438 0.210774824 0.040691264 -0.000706639607 0.00771662639 -0.0573148727 -0.0712422356 -0.104149163 0.155612603 -0.0820546821 -0.0294657424 0.158570707 -0.0898777843 -0.46188587 -0.188156694 0.452506393 -0.235588938 0.30768013 -0.591781974 
0.404405177 0.374304682 0.454993814 -0.0382428393 0.0266306438 -0.0854683891 -0.413419932 0.0292988028 -0.827567756 -0.0669441149 -0.0121144494 0.141747862 0.247521043 0.954077482 -0.623497427 -0.190593168 0.050915774 0.0578927025 0.249087915 -0.679487407 0.0306292269 -0.00948201492 0.357697189 -0.0549493432 -0.141694188 -0.0913837701 -0.0139999269 -0.0370303094 0.0219933651 0.204330355 -1.40651214 -0.1605459 -0.0454135612 0.501464367 -0.148578525 0.615702271 -0.0730716884 -0.494573236 -0.337374389 0.222016439 0.295208484 0.488005936 0.13
2
5
train
5
0
2
10
gradWeight
4
113
3
V 1
16
torch.CudaTensor
2
1 1152
1152 1
1
4
114
3
V 1
17
torch.CudaStorage
1152
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
2
5
_type
2
16
torch.CudaTensor
2
4
bias
4
115
3
V 1
16
torch.CudaTensor
1
1
1
1
4
116
3
V 1
17
torch.CudaStorage
1
0.0883794129
2
8
gradBias
4
117
3
V 1
16
torch.CudaTensor
1
1
1
1
4
118
3
V 1
17
torch.CudaStorage
1
0
2
4
padH
1
0
1
14
4
119
3
V 1
7
nn.View
3
120
7
2
11
numElements
1
1
2
5
_type
2
16
torch.CudaTensor
2
6
output
4
3
2
9
gradInput
4
121
3
V 1
16
torch.CudaTensor
0
1
0
2
4
size
4
122
3
V 1
17
torch.LongStorage
1
-1
2
12
numInputDims
1
3
2
5
train
5
0
2
5
train
5
0