@@ -162,14 +162,14 @@ def test_backward_pass(net_arguments, mnist):
 
     images, labels = iter(mnist_train).next()
 
-    def loss_grad(layer):
+    def loss_grad(layer, expected):
         gradients = []
 
         for b in range(4):
             row = []
             for j in range(10):
                 result = layer.result.data[j, b]
-                if j == labels[b]:
+                if j == expected[b]:
                     result -= 1
                 row.append(result)
             gradients.append(row)
@@ -180,7 +180,7 @@ def loss_grad(layer):
 
     for i in range(get_run_count()):
         net.forward(images.numpy())
-        net.backward(loss_grad)
+        net.backward(labels, loss_grad)
 
     criterion = nn.CrossEntropyLoss()
 
@@ -221,14 +221,14 @@ def run_training(net_arguments, mnist):
 
     images, labels = iter(mnist_train).next()
 
-    def loss_grad(layer):
+    def loss_grad(layer, expected):
         gradients = []
 
         for b in range(4):
             row = []
             for j in range(10):
                 result = layer.result.data[j, b]
-                if j == labels[b]:
+                if j == expected[b]:
                     result -= 1
                 row.append(result)
             gradients.append(row)
@@ -245,7 +245,7 @@ def loss_grad(layer):
         compare(outputs, nn.Softmax(dim=1)(pytorch_outputs),
                 1e-12)
 
-        net.backward(loss_grad, optimizer)
+        net.backward(labels, loss_grad, optimizer)
 
         pytorch_loss = criterion(pytorch_outputs, labels)
         pytorch_loss.backward()
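For reference, the gradient that `loss_grad` assembles in both tests is the standard per-sample softmax-plus-cross-entropy derivative: for softmax output p and true class y, dL/dz_j = p_j - 1[j = y], which is why the code subtracts 1 at `expected[b]`. A minimal NumPy sketch of that derivative (the shapes follow the tests' `(10, 4)` class-by-batch output; the helper name `cross_entropy_grad` is hypothetical, not part of the repository):

```python
import numpy as np

def cross_entropy_grad(probs, expected):
    """Gradient of cross-entropy w.r.t. the pre-softmax logits.

    probs:    (classes, batch) softmax outputs, like layer.result.data
    expected: (batch,) integer class labels
    """
    grad = probs.copy()
    for b, label in enumerate(expected):
        grad[label, b] -= 1.0  # subtract the one-hot target at the true class
    return grad
```

Passing `labels` explicitly to `net.backward` rather than letting each `loss_grad` close over them presumably lets `backward` hand whatever targets it receives to the callback as its `expected` argument, so the same closure works for any batch.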