name: "DL_FR"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 61
input_dim: 61
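# Input blob shape: 1 x 3 x 61 x 61 (batch, channels, height, width).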
################# layer-1
layers {
  bottom: "data"
  top: "conv1/3x3_s2_1"
  name: "conv1/3x3_s2_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 2
  }
}
layers {
  bottom: "conv1/3x3_s2_1"
  top: "conv1/3x3_s2_1"
  name: "conv1/relu_3x3"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}

layers {
  name: "conv1/norm1"
  type: LRN
  bottom: "conv1/3x3_s2_1"
  top: "conv1/norm1"
  lrn_param {
    local_size: 3
    alpha: 0.0001
    beta: 0.75
  }
}
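# Layer-1 output (assuming standard Caffe output-size arithmetic, floor((61 - 3) / 2) + 1 = 30):
# 64 x 30 x 30; PReLU is applied in place, followed by LRN.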

##################### layer-2

layers {
  bottom: "conv1/norm1"
  top: "conv2/3x3_s2_1"
  name: "conv2/3x3_s2_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 2
  }
}
layers {
  bottom: "conv2/3x3_s2_1"
  top: "conv2/3x3_s2_1"
  name: "conv2/relu_3x3"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}

layers {
  name: "conv2/norm1"
  type: LRN
  bottom: "conv2/3x3_s2_1"
  top: "conv2/norm1"
  lrn_param {
    local_size: 3
    alpha: 0.0001
    beta: 0.75
  }
}
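# Layer-2 output: 128 x 14 x 14 (floor((30 - 3) / 2) + 1 = 14).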

##################################### layer-3

layers {
  bottom: "conv2/norm1"
  top: "conv3/2x2_s2_1"
  name: "conv3/2x2_s2_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "conv3/2x2_s2_1"
  top: "conv3/2x2_s2_1"
  name: "conv3/relu_2x2_1"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}
layers {
  bottom: "conv3/2x2_s2_1"
  top: "conv3/2x2_s1_2"
  name: "conv3/2x2_s1_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 384
    kernel_size: 2
    pad: 1
    stride: 1
  }
}
layers {
  bottom: "conv3/2x2_s1_2"
  top: "conv3/2x2_s1_2"
  name: "conv3/relu_2x2_2"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}
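# Layer-3 output: the 2x2/s2 conv gives 256 x 7 x 7; the padded 2x2/s1 conv then gives 384 x 8 x 8.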
##################### layer-4

layers {
  bottom: "conv3/2x2_s1_2"
  top: "conv4/2x2_s1_1"
  name: "conv4/2x2_s1_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    kernel_size: 2
    stride: 1
  }
}
layers {
  bottom: "conv4/2x2_s1_1"
  top: "conv4/2x2_s1_1"
  name: "conv4/relu_2x2_1"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}

layers {
  bottom: "conv4/2x2_s1_1"
  top: "conv4/2x2_s1_2"
  name: "conv4/2x2_s1_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    kernel_size: 2
    pad: 1
    stride: 1
  }
}
layers {
  bottom: "conv4/2x2_s1_2"
  top: "conv4/2x2_s1_2"
  name: "conv4/relu_2x2_2"
  type: PRELU
  prelu_param {
    channel_shared: false
  }
}
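# Layer-4 output: 256 x 7 x 7, then the padded 2x2/s1 conv gives 128 x 8 x 8.
# This 8x8 map feeds the three spatial-pyramid pooling bins below.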
############## 4x4 bin
layers {
  name: "pool4_1"
  type: POOLING
  bottom: "conv4/2x2_s1_2"
  top: "pool4_1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}

layers {
  name: "pool4_1_flatten"
  type: FLATTEN
  bottom: "pool4_1"
  top: "pool4_1_flatten"
}
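# 2x2/s2 max pooling over the 8x8 map gives 4x4 bins; flattened size 128 * 16 = 2048.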

############### 2x2 bin

layers {
  name: "pool4_2"
  type: POOLING
  bottom: "conv4/2x2_s1_2"
  top: "pool4_2"
  pooling_param {
    pool: MAX
    kernel_size: 4
    stride: 4
  }
}

layers {
  name: "pool4_2_flatten"
  type: FLATTEN
  bottom: "pool4_2"
  top: "pool4_2_flatten"
}
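# 4x4/s4 max pooling gives 2x2 bins; flattened size 128 * 4 = 512.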

############# 1x1 bin

layers {
  name: "pool4_3"
  type: POOLING
  bottom: "conv4/2x2_s1_2"
  top: "pool4_3"
  pooling_param {
    pool: MAX
    kernel_size: 8
    stride: 8
  }
}

layers {
  name: "pool4_3_flatten"
  type: FLATTEN
  bottom: "pool4_3"
  top: "pool4_3_flatten"
}
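# 8x8/s8 max pooling gives a single 1x1 bin; flattened size 128.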

layers {
  bottom: "pool4_1_flatten"
  bottom: "pool4_2_flatten"
  bottom: "pool4_3_flatten"
  top: "pool4_spp"
  name: "pool4_spp"
  type: CONCAT
}
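# Spatial-pyramid pooling concat: 2048 + 512 + 128 = 2688 features per image.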

########################## fc-5

layers {
  name: "fc5"
  type: INNER_PRODUCT
  bottom: "pool4_spp"
  top: "fc5"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu5"
  type: PRELU
  bottom: "fc5"
  top: "fc5"
  prelu_param {
    channel_shared: false
  }
}
#layers {
#  name: "drop5"
#  type: DROPOUT
#  bottom: "fc5"
#  top: "fc5"
#  dropout_param {
#    dropout_ratio: 0.2
#  }
#}

############## fc-6
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "fc5"
  top: "fc6"
  inner_product_param {
    num_output: 256
  }
}
layers {
  name: "relu6"
  type: PRELU
  bottom: "fc6"
  top: "fc6"
  prelu_param {
    channel_shared: false
  }
}
#layers {
#  name: "drop6"
#  type: DROPOUT
#  bottom: "fc6"
#  top: "fc6"
#  dropout_param {
#    dropout_ratio: 0.1
#  }
#}
#layers {
#  name: "fc7_face64"
#  type: INNER_PRODUCT
#  bottom: "fc6"
#  top: "fc7_face64"
#  inner_product_param {
#    num_output: 2193
#  }
#}
#layers {
#  name: "accuracy_top1"
#  type: ACCURACY
#  bottom: "fc7_face64"
#  bottom: "label"
#  top: "accuracy_top1"
#  accuracy_param {
#    top_k: 1
#  }
#  include: { phase: TEST }
#}
#layers {
#  name: "loss"
#  type: SOFTMAX_LOSS
#  bottom: "fc7_face64"
#  bottom: "label"
#  top: "loss"
#}
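
# With the classifier, accuracy, and loss layers above left commented out, this file serves as a
# deploy definition: the network's final output is the 256-dimensional "fc6" blob.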