function lgraph = resnet50Layers()
% resnet50Layers   ResNet-50 layer graph
%
%   lgraph = resnet50Layers creates a layer graph with the network
%   architecture of ResNet-50. The layer graph describes the architecture
%   only; it contains no pretrained weights.
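%
%   Example (a minimal usage sketch; assumes Deep Learning Toolbox is
%   installed):
%
%       lgraph = resnet50Layers();
%       analyzeNetwork(lgraph)   % inspect the untrained architecture
%
%   To adapt the classification head for a different number of classes,
%   see the sketch in the comments at the end of this file.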
lgraph = layerGraph();
%% Add Layer Branches
% Add the branches of the network to the layer graph. Each branch is a linear
% array of layers.
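% ResNet-50 stacks bottleneck residual blocks: a 1x1 convolution that reduces
% the channel count, a 3x3 convolution, and a 1x1 convolution that expands it
% again, each followed by batch normalization. The first block of each stage
% (res2a, res3a, res4a, res5a) also carries a 1x1 "branch1" projection so that
% the shortcut matches the main branch in channel count (and, from stage 3
% onward, in spatial size); the remaining blocks use identity shortcuts.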
tempLayers = [
imageInputLayer([224 224 3],"Name","input_1")
convolution2dLayer([7 7],64,"Name","conv1","Padding",[3 3 3 3],"Stride",[2 2])
batchNormalizationLayer("Name","bn_conv1","Epsilon",0.001)
reluLayer("Name","activation_1_relu")
maxPooling2dLayer([3 3],"Name","max_pooling2d_1","Padding",[1 1 1 1],"Stride",[2 2])];
lgraph = addLayers(lgraph,tempLayers);
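% Stage 2 (res2a-res2c): three bottleneck blocks with 64-64-256 channels.
% Downsampling has already been done by max_pooling2d_1, so all convolutions
% in this stage use stride 1.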
tempLayers = [
convolution2dLayer([1 1],256,"Name","res2a_branch1","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2a_branch1","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],64,"Name","res2a_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2a_branch2a","Epsilon",0.001)
reluLayer("Name","activation_2_relu")
convolution2dLayer([3 3],64,"Name","res2a_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn2a_branch2b","Epsilon",0.001)
reluLayer("Name","activation_3_relu")
convolution2dLayer([1 1],256,"Name","res2a_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2a_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_1")
reluLayer("Name","activation_4_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],64,"Name","res2b_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2b_branch2a","Epsilon",0.001)
reluLayer("Name","activation_5_relu")
convolution2dLayer([3 3],64,"Name","res2b_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn2b_branch2b","Epsilon",0.001)
reluLayer("Name","activation_6_relu")
convolution2dLayer([1 1],256,"Name","res2b_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2b_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_2")
reluLayer("Name","activation_7_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],64,"Name","res2c_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2c_branch2a","Epsilon",0.001)
reluLayer("Name","activation_8_relu")
convolution2dLayer([3 3],64,"Name","res2c_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn2c_branch2b","Epsilon",0.001)
reluLayer("Name","activation_9_relu")
convolution2dLayer([1 1],256,"Name","res2c_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn2c_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_3")
reluLayer("Name","activation_10_relu")];
lgraph = addLayers(lgraph,tempLayers);
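% Stage 3 (res3a-res3d): four bottleneck blocks with 128-128-512 channels.
% The first block downsamples with stride 2 in both branch2a and branch1.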
tempLayers = [
convolution2dLayer([1 1],128,"Name","res3a_branch2a","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn3a_branch2a","Epsilon",0.001)
reluLayer("Name","activation_11_relu")
convolution2dLayer([3 3],128,"Name","res3a_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn3a_branch2b","Epsilon",0.001)
reluLayer("Name","activation_12_relu")
convolution2dLayer([1 1],512,"Name","res3a_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3a_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],512,"Name","res3a_branch1","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn3a_branch1","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_4")
reluLayer("Name","activation_13_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],128,"Name","res3b_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3b_branch2a","Epsilon",0.001)
reluLayer("Name","activation_14_relu")
convolution2dLayer([3 3],128,"Name","res3b_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn3b_branch2b","Epsilon",0.001)
reluLayer("Name","activation_15_relu")
convolution2dLayer([1 1],512,"Name","res3b_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3b_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_5")
reluLayer("Name","activation_16_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],128,"Name","res3c_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3c_branch2a","Epsilon",0.001)
reluLayer("Name","activation_17_relu")
convolution2dLayer([3 3],128,"Name","res3c_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn3c_branch2b","Epsilon",0.001)
reluLayer("Name","activation_18_relu")
convolution2dLayer([1 1],512,"Name","res3c_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3c_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_6")
reluLayer("Name","activation_19_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],128,"Name","res3d_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3d_branch2a","Epsilon",0.001)
reluLayer("Name","activation_20_relu")
convolution2dLayer([3 3],128,"Name","res3d_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn3d_branch2b","Epsilon",0.001)
reluLayer("Name","activation_21_relu")
convolution2dLayer([1 1],512,"Name","res3d_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn3d_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_7")
reluLayer("Name","activation_22_relu")];
lgraph = addLayers(lgraph,tempLayers);
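% Stage 4 (res4a-res4f): six bottleneck blocks with 256-256-1024 channels.
% The first block downsamples with stride 2 in both branch2a and branch1.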
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4a_branch2a","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn4a_branch2a","Epsilon",0.001)
reluLayer("Name","activation_23_relu")
convolution2dLayer([3 3],256,"Name","res4a_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4a_branch2b","Epsilon",0.001)
reluLayer("Name","activation_24_relu")
convolution2dLayer([1 1],1024,"Name","res4a_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4a_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],1024,"Name","res4a_branch1","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn4a_branch1","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_8")
reluLayer("Name","activation_25_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4b_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4b_branch2a","Epsilon",0.001)
reluLayer("Name","activation_26_relu")
convolution2dLayer([3 3],256,"Name","res4b_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4b_branch2b","Epsilon",0.001)
reluLayer("Name","activation_27_relu")
convolution2dLayer([1 1],1024,"Name","res4b_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4b_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_9")
reluLayer("Name","activation_28_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4c_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4c_branch2a","Epsilon",0.001)
reluLayer("Name","activation_29_relu")
convolution2dLayer([3 3],256,"Name","res4c_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4c_branch2b","Epsilon",0.001)
reluLayer("Name","activation_30_relu")
convolution2dLayer([1 1],1024,"Name","res4c_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4c_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_10")
reluLayer("Name","activation_31_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4d_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4d_branch2a","Epsilon",0.001)
reluLayer("Name","activation_32_relu")
convolution2dLayer([3 3],256,"Name","res4d_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4d_branch2b","Epsilon",0.001)
reluLayer("Name","activation_33_relu")
convolution2dLayer([1 1],1024,"Name","res4d_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4d_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_11")
reluLayer("Name","activation_34_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4e_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4e_branch2a","Epsilon",0.001)
reluLayer("Name","activation_35_relu")
convolution2dLayer([3 3],256,"Name","res4e_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4e_branch2b","Epsilon",0.001)
reluLayer("Name","activation_36_relu")
convolution2dLayer([1 1],1024,"Name","res4e_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4e_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_12")
reluLayer("Name","activation_37_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],256,"Name","res4f_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4f_branch2a","Epsilon",0.001)
reluLayer("Name","activation_38_relu")
convolution2dLayer([3 3],256,"Name","res4f_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn4f_branch2b","Epsilon",0.001)
reluLayer("Name","activation_39_relu")
convolution2dLayer([1 1],1024,"Name","res4f_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn4f_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_13")
reluLayer("Name","activation_40_relu")];
lgraph = addLayers(lgraph,tempLayers);
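% Stage 5 (res5a-res5c): three bottleneck blocks with 512-512-2048 channels.
% The first block downsamples with stride 2 in both branch2a and branch1.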
tempLayers = [
convolution2dLayer([1 1],512,"Name","res5a_branch2a","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn5a_branch2a","Epsilon",0.001)
reluLayer("Name","activation_41_relu")
convolution2dLayer([3 3],512,"Name","res5a_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn5a_branch2b","Epsilon",0.001)
reluLayer("Name","activation_42_relu")
convolution2dLayer([1 1],2048,"Name","res5a_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn5a_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],2048,"Name","res5a_branch1","BiasLearnRateFactor",0,"Stride",[2 2])
batchNormalizationLayer("Name","bn5a_branch1","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_14")
reluLayer("Name","activation_43_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],512,"Name","res5b_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn5b_branch2a","Epsilon",0.001)
reluLayer("Name","activation_44_relu")
convolution2dLayer([3 3],512,"Name","res5b_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn5b_branch2b","Epsilon",0.001)
reluLayer("Name","activation_45_relu")
convolution2dLayer([1 1],2048,"Name","res5b_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn5b_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
additionLayer(2,"Name","add_15")
reluLayer("Name","activation_46_relu")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([1 1],512,"Name","res5c_branch2a","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn5c_branch2a","Epsilon",0.001)
reluLayer("Name","activation_47_relu")
convolution2dLayer([3 3],512,"Name","res5c_branch2b","BiasLearnRateFactor",0,"Padding","same")
batchNormalizationLayer("Name","bn5c_branch2b","Epsilon",0.001)
reluLayer("Name","activation_48_relu")
convolution2dLayer([1 1],2048,"Name","res5c_branch2c","BiasLearnRateFactor",0)
batchNormalizationLayer("Name","bn5c_branch2c","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);
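% Final residual block followed by the classification head: global average
% pooling, a 1000-way fully connected layer, softmax, and the classification
% output layer.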
tempLayers = [
additionLayer(2,"Name","add_16")
reluLayer("Name","activation_49_relu")
globalAveragePooling2dLayer("Name","avg_pool")
fullyConnectedLayer(1000,"Name","fc1000","BiasLearnRateFactor",0)
softmaxLayer("Name","fc1000_softmax")
classificationLayer("Name","ClassificationLayer_fc1000")];
lgraph = addLayers(lgraph,tempLayers);
%% Connect Layer Branches
% Connect all the branches of the network to create the network graph.
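% For every residual block, the main (branch2) path feeds input 1 of its
% addition layer and the shortcut feeds input 2: an identity connection from
% the previous block's output ReLU for identity blocks, or the
% batch-normalized branch1 projection for the first block of each stage.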
lgraph = connectLayers(lgraph,"max_pooling2d_1","res2a_branch1");
lgraph = connectLayers(lgraph,"max_pooling2d_1","res2a_branch2a");
lgraph = connectLayers(lgraph,"bn2a_branch1","add_1/in2");
lgraph = connectLayers(lgraph,"bn2a_branch2c","add_1/in1");
lgraph = connectLayers(lgraph,"activation_4_relu","res2b_branch2a");
lgraph = connectLayers(lgraph,"activation_4_relu","add_2/in2");
lgraph = connectLayers(lgraph,"bn2b_branch2c","add_2/in1");
lgraph = connectLayers(lgraph,"activation_7_relu","res2c_branch2a");
lgraph = connectLayers(lgraph,"activation_7_relu","add_3/in2");
lgraph = connectLayers(lgraph,"bn2c_branch2c","add_3/in1");
lgraph = connectLayers(lgraph,"activation_10_relu","res3a_branch2a");
lgraph = connectLayers(lgraph,"activation_10_relu","res3a_branch1");
lgraph = connectLayers(lgraph,"bn3a_branch1","add_4/in2");
lgraph = connectLayers(lgraph,"bn3a_branch2c","add_4/in1");
lgraph = connectLayers(lgraph,"activation_13_relu","res3b_branch2a");
lgraph = connectLayers(lgraph,"activation_13_relu","add_5/in2");
lgraph = connectLayers(lgraph,"bn3b_branch2c","add_5/in1");
lgraph = connectLayers(lgraph,"activation_16_relu","res3c_branch2a");
lgraph = connectLayers(lgraph,"activation_16_relu","add_6/in2");
lgraph = connectLayers(lgraph,"bn3c_branch2c","add_6/in1");
lgraph = connectLayers(lgraph,"activation_19_relu","res3d_branch2a");
lgraph = connectLayers(lgraph,"activation_19_relu","add_7/in2");
lgraph = connectLayers(lgraph,"bn3d_branch2c","add_7/in1");
lgraph = connectLayers(lgraph,"activation_22_relu","res4a_branch2a");
lgraph = connectLayers(lgraph,"activation_22_relu","res4a_branch1");
lgraph = connectLayers(lgraph,"bn4a_branch1","add_8/in2");
lgraph = connectLayers(lgraph,"bn4a_branch2c","add_8/in1");
lgraph = connectLayers(lgraph,"activation_25_relu","res4b_branch2a");
lgraph = connectLayers(lgraph,"activation_25_relu","add_9/in2");
lgraph = connectLayers(lgraph,"bn4b_branch2c","add_9/in1");
lgraph = connectLayers(lgraph,"activation_28_relu","res4c_branch2a");
lgraph = connectLayers(lgraph,"activation_28_relu","add_10/in2");
lgraph = connectLayers(lgraph,"bn4c_branch2c","add_10/in1");
lgraph = connectLayers(lgraph,"activation_31_relu","res4d_branch2a");
lgraph = connectLayers(lgraph,"activation_31_relu","add_11/in2");
lgraph = connectLayers(lgraph,"bn4d_branch2c","add_11/in1");
lgraph = connectLayers(lgraph,"activation_34_relu","res4e_branch2a");
lgraph = connectLayers(lgraph,"activation_34_relu","add_12/in2");
lgraph = connectLayers(lgraph,"bn4e_branch2c","add_12/in1");
lgraph = connectLayers(lgraph,"activation_37_relu","res4f_branch2a");
lgraph = connectLayers(lgraph,"activation_37_relu","add_13/in2");
lgraph = connectLayers(lgraph,"bn4f_branch2c","add_13/in1");
lgraph = connectLayers(lgraph,"activation_40_relu","res5a_branch2a");
lgraph = connectLayers(lgraph,"activation_40_relu","res5a_branch1");
lgraph = connectLayers(lgraph,"bn5a_branch1","add_14/in2");
lgraph = connectLayers(lgraph,"bn5a_branch2c","add_14/in1");
lgraph = connectLayers(lgraph,"activation_43_relu","res5b_branch2a");
lgraph = connectLayers(lgraph,"activation_43_relu","add_15/in2");
lgraph = connectLayers(lgraph,"bn5b_branch2c","add_15/in1");
lgraph = connectLayers(lgraph,"activation_46_relu","res5c_branch2a");
lgraph = connectLayers(lgraph,"activation_46_relu","add_16/in2");
lgraph = connectLayers(lgraph,"bn5c_branch2c","add_16/in1");
end
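
% Example (a hedged sketch of adapting the head for a new task; numClasses,
% "fc_custom", and "output_custom" are illustrative names, not part of the
% generated architecture):
%
%   lgraph = resnet50Layers();
%   numClasses = 10;
%   lgraph = replaceLayer(lgraph,"fc1000", ...
%       fullyConnectedLayer(numClasses,"Name","fc_custom"));
%   lgraph = replaceLayer(lgraph,"ClassificationLayer_fc1000", ...
%       classificationLayer("Name","output_custom"));
%   % The modified graph can then be trained with trainNetwork and a set of
%   % trainingOptions appropriate for the data.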