metrics.py
from __future__ import division

from torch.autograd import Variable
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric


class Loss(Metric):
    """
    Calculates the average loss according to the passed `loss_fn`.

    `loss_fn` must return the average loss over all observations in the batch.

    `update` must receive output of the form (y_pred, y).

    Note: this exists to support PyTorch 0.4; the equivalent change is already
    in an upstream PR but not yet merged.
    """
    def __init__(self, loss_fn, output_transform=lambda x: x):
        super(Loss, self).__init__(output_transform)
        self._loss_fn = loss_fn

    def reset(self):
        self._sum = 0
        self._num_examples = 0

    def update(self, output):
        y_pred, y = output
        average_loss = self._loss_fn(y_pred, y)
        assert len(average_loss.shape) == 0, "`loss_fn` did not return the average loss"
        # Weight the batch-average loss by the batch size so `compute` can
        # return the average over all examples seen so far.
        self._sum += average_loss.item() * y.shape[0]
        self._num_examples += y.shape[0]

    def compute(self):
        if self._num_examples == 0:
            raise NotComputableError(
                "Loss must have at least one example before it can be computed"
            )
        return self._sum / self._num_examples
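

# --- Usage sketch (not part of the original file) ---
# A minimal example of driving the metric manually via reset/update/compute,
# assuming a batch-averaging loss function such as torch.nn.functional.nll_loss
# and synthetic tensors; in practice the metric would be attached to an ignite
# Engine, which calls these methods for you.
if __name__ == "__main__":
    import torch
    import torch.nn.functional as F

    loss_metric = Loss(F.nll_loss)
    loss_metric.reset()

    # Fake batch: log-probabilities for 4 samples over 3 classes, plus targets.
    y_pred = torch.log_softmax(torch.randn(4, 3), dim=1)
    y = torch.tensor([0, 2, 1, 0])

    # nll_loss returns a 0-dim tensor (the batch average), so the assert in
    # `update` passes and the running sum is weighted by the batch size.
    loss_metric.update((y_pred, y))
    print(loss_metric.compute())  # average NLL loss over the 4 examples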