Skip to content

Commit 5bf067f

Browse files
author
Maksymilian Graczyk
committed Aug 27, 2020
Change network tests to reflect updates to backward()
1 parent c5fec26 commit 5bf067f

File tree

2 files changed

+8
-8
lines changed

2 files changed

+8
-8
lines changed
 

‎tests/test_lenet.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -162,14 +162,14 @@ def test_backward_pass(net_arguments, mnist):
162162

163163
images, labels = iter(mnist_train).next()
164164

165-
def loss_grad(layer):
165+
def loss_grad(layer, expected):
166166
gradients = []
167167

168168
for b in range(4):
169169
row = []
170170
for j in range(10):
171171
result = layer.result.data[j, b]
172-
if j == labels[b]:
172+
if j == expected[b]:
173173
result -= 1
174174
row.append(result)
175175
gradients.append(row)
@@ -180,7 +180,7 @@ def loss_grad(layer):
180180

181181
for i in range(get_run_count()):
182182
net.forward(images.numpy())
183-
net.backward(loss_grad)
183+
net.backward(labels, loss_grad)
184184

185185
criterion = nn.CrossEntropyLoss()
186186

@@ -221,14 +221,14 @@ def run_training(net_arguments, mnist):
221221

222222
images, labels = iter(mnist_train).next()
223223

224-
def loss_grad(layer):
224+
def loss_grad(layer, expected):
225225
gradients = []
226226

227227
for b in range(4):
228228
row = []
229229
for j in range(10):
230230
result = layer.result.data[j, b]
231-
if j == labels[b]:
231+
if j == expected[b]:
232232
result -= 1
233233
row.append(result)
234234
gradients.append(row)
@@ -245,7 +245,7 @@ def loss_grad(layer):
245245
compare(outputs, nn.Softmax(dim=1)(pytorch_outputs),
246246
1e-12)
247247

248-
net.backward(loss_grad, optimizer)
248+
net.backward(labels, loss_grad, optimizer)
249249

250250
pytorch_loss = criterion(pytorch_outputs, labels)
251251
pytorch_loss.backward()

‎tests/test_simple.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def test_backward_pass(net_arguments):
129129
dtype=np.float64)
130130
expected = np.array([2, 1])
131131

132-
def loss_grad(layer):
132+
def loss_grad(layer, expected):
133133
gradients = []
134134

135135
for b in range(2):
@@ -145,7 +145,7 @@ def loss_grad(layer):
145145

146146
for i in range(get_run_count()):
147147
net.forward(input_data)
148-
net.backward(loss_grad)
148+
net.backward(expected, loss_grad)
149149

150150
criterion = nn.CrossEntropyLoss()
151151

0 commit comments

Comments (0)