ExpRecord.txt
Record of weight-quantization experiments: each numbered entry lists the quantized layers and their bit-widths (layerAndBit), followed by precision @ 1 and duration from eval.py, and precision @ 1, duration, and output-error statistics from error.py.
0: original (unquantized baseline)
eval.py:
2017-11-04 15:31:23.863139: precision @ 1 = 0.865, duration = 3.720944
error.py:
2017-11-04 16:26:03.209799: precision @ 1 = 0.873, duration = 16.999680
2017-11-04 16:26:03.209876: mean error after softmax = 0.303
2017-11-04 16:26:03.209899: mean error without softmax = 50.351
2017-11-04 16:26:03.209909: max error after softmax = 2.000
2017-11-04 16:26:03.209916: max error without softmax = 130.822
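Note: the record itself does not define the error.py metrics. A minimal sketch of one plausible definition follows, assuming "error" is the per-example L1 distance between the baseline and quantized model outputs, computed on the raw logits ("without softmax") and on the softmax probabilities ("after softmax"). Under that assumption the after-softmax error is bounded by 2 per example, which matches the "max error after softmax = 2.000" values recorded below. The names softmax, output_errors, logits_ref, and logits_quant are illustrative, not taken from error.py.

import numpy as np

def softmax(x, axis=-1):
    # numerically stable softmax over the class axis
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

def output_errors(logits_ref, logits_quant):
    # per-example L1 distance on raw logits ("without softmax")
    raw = np.abs(logits_ref - logits_quant).sum(axis=1)
    # per-example L1 distance on probabilities ("after softmax"), bounded by 2
    soft = np.abs(softmax(logits_ref) - softmax(logits_quant)).sum(axis=1)
    return raw.mean(), raw.max(), soft.mean(), soft.max()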
1:
layerAndBit = {"conv1/weights:0": 4,
"conv2/weights:0": 4}
eval.py:
2017-11-04 14:03:08.048171: precision @ 1 = 0.846, duration = 3.665714
error.py:
2017-11-04 16:27:12.885328: precision @ 1 = 0.838, duration = 17.040151
2017-11-04 16:27:12.885384: mean error after softmax = 0.350
2017-11-04 16:27:12.885396: mean error without softmax = 41.865
2017-11-04 16:27:12.885404: max error after softmax = 2.000
2017-11-04 16:27:12.885411: max error without softmax = 116.396
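The record shows only the layerAndBit configurations, not the quantization code itself, so the following is a minimal sketch under the assumption that each listed weight tensor is uniformly quantized to 2**bits evenly spaced levels over its value range before evaluation. The function name quantize_uniform and the stand-in tensor shapes are illustrative, not taken from the actual scripts.

import numpy as np

def quantize_uniform(weights, bits):
    # map weights onto 2**bits evenly spaced levels spanning [min, max]
    levels = 2 ** bits
    w_min, w_max = weights.min(), weights.max()
    step = (w_max - w_min) / (levels - 1)
    if step == 0:
        return weights.copy()  # constant tensor: nothing to quantize
    return np.round((weights - w_min) / step) * step + w_min

# applying entry 1's configuration to stand-in tensors
layerAndBit = {"conv1/weights:0": 4,
               "conv2/weights:0": 4}
weights = {name: np.random.randn(5, 5, 3, 64).astype(np.float32)
           for name in layerAndBit}
quantized = {name: quantize_uniform(w, layerAndBit[name])
             for name, w in weights.items()}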
2:
layerAndBit = {"conv1/weights:0": 4,
"conv2/weights:0": 4,
"local3/weights:0": 4,
"local4/weights:0": 4,
"softmax_linear/weights:0": 4}
eval.py:
2017-11-04 15:31:01.568903: precision @ 1 = 0.789, duration = 3.514027
error.py:
2017-11-04 16:27:54.463797: precision @ 1 = 0.789, duration = 17.217553
2017-11-04 16:27:54.463934: mean error after softmax = 0.505
2017-11-04 16:27:54.463948: mean error without softmax = 30.492
2017-11-04 16:27:54.463963: max error after softmax = 2.000
2017-11-04 16:27:54.463973: max error without softmax = 81.346
3:
layerAndBit = {"conv1/weights:0": 2,
"conv2/weights:0": 2,
"local3/weights:0": 2,
"local4/weights:0": 2,
"softmax_linear/weights:0": 2}
eval.py:
2017-11-04 16:56:05.320873: precision @ 1 = 0.188, duration = 3.628469
error.py:
2017-11-04 16:55:06.257904: precision @ 1 = 0.175, duration = 17.067567
2017-11-04 16:55:06.257979: mean error after softmax = 1.751
2017-11-04 16:55:06.257991: mean error without softmax = 4.419
2017-11-04 16:55:06.258003: max error after softmax = 1.987
2017-11-04 16:55:06.258009: max error without softmax = 10.731
4:
layerAndBit = {"conv1/weights:0": 3,
"conv2/weights:0": 3,
"local3/weights:0": 3,
"local4/weights:0": 3,
"softmax_linear/weights:0": 3}
eval.py:
2017-11-04 17:54:17.588597: precision @ 1 = 0.440, duration = 5.493508
error.py:
2017-11-04 17:53:34.737130: precision @ 1 = 0.429, duration = 25.752606
2017-11-04 17:53:34.737214: mean error after softmax = 1.387
2017-11-04 17:53:34.737230: mean error without softmax = 10.987
2017-11-04 17:53:34.737240: max error after softmax = 1.996
2017-11-04 17:53:34.737247: max error without softmax = 30.019
5:
layerAndBit = {"conv1/weights:0": 6,
"conv2/weights:0": 6,
"local3/weights:0": 6,
"local4/weights:0": 6,
"softmax_linear/weights:0": 6}
eval.py:
2017-11-04 19:33:15.941640: precision @ 1 = 0.859, duration = 3.530235
error.py:
2017-11-04 19:33:50.251955: precision @ 1 = 0.867, duration = 16.937442
2017-11-04 19:33:50.252076: mean error after softmax = 0.312
2017-11-04 19:33:50.252096: mean error without softmax = 48.037
2017-11-04 19:33:50.252109: max error after softmax = 2.000
2017-11-04 19:33:50.252116: max error without softmax = 136.130
6:
layerAndBit = {"conv1/weights:0": 10,
"conv2/weights:0": 10,
"local3/weights:0": 10,
"local4/weights:0": 10,
"softmax_linear/weights:0": 10}
eval.py:
2017-11-05 10:21:56.840416: precision @ 1 = 0.866, duration = 3.916633
error.py:
2017-11-05 10:22:50.336931: precision @ 1 = 0.871, duration = 16.301383
2017-11-05 10:22:50.337050: mean error after softmax = 0.303
2017-11-05 10:22:50.337070: mean error without softmax = 50.475
2017-11-05 10:22:50.337082: max error after softmax = 2.000
2017-11-05 10:22:50.337089: max error without softmax = 126.614