# operation.py
import os
import sys

import torch
import torch.nn.functional as F


def train(train_iter, dev_iter, model, args):
    """Train `model` on train_iter, evaluating on dev_iter and saving checkpoints."""
    model.train()
    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)

    steps = 0
    best_acc = 0
    last_step = 0
    for epoch in range(1, args.epochs + 1):
        for batch in train_iter:
            feature, target = batch.text, batch.label
            feature.t_()    # make the batch dimension come first
            target.sub_(1)  # labels are 1-indexed; shift them to start at 0
            if args.cuda:
                feature, target = feature.cuda(), target.cuda()

            optimizer.zero_grad()
            logit = model(feature)
            loss = F.cross_entropy(logit, target)
            loss.backward()
            optimizer.step()

            steps += 1
            if steps % args.log_interval == 0:
                corrects = (torch.max(logit, 1)[1].view(target.size()) == target).sum().item()
                accuracy = 100.0 * corrects / batch.batch_size
                sys.stdout.write('\rBatch[{}] - loss: {:.6f}  acc: {:.3f}%({}/{})'.format(steps,
                                                                                          loss.item(),
                                                                                          accuracy,
                                                                                          corrects,
                                                                                          batch.batch_size))
            if steps % args.test_interval == 0:
                dev_acc = test(dev_iter, model, args)
                model.train()  # test() puts the model in eval mode; switch back for training
                if dev_acc > best_acc:
                    best_acc = dev_acc
                    last_step = steps
                    save(model, args.save_dir, 'best', steps)
            if steps % args.save_interval == 0:
                save(model, args.save_dir, 'snapshot', steps)


def test(data_iter, model, args):
    """Evaluate the model on data_iter; print and return the accuracy in percent."""
    model.eval()
    corrects, avg_loss = 0, 0
    with torch.no_grad():  # no gradients are needed for evaluation
        for batch in data_iter:
            feature, target = batch.text, batch.label
            feature.t_()    # make the batch dimension come first
            target.sub_(1)  # labels are 1-indexed; shift them to start at 0
            if args.cuda:
                feature, target = feature.cuda(), target.cuda()

            logit = model(feature)
            loss = F.cross_entropy(logit, target, reduction='sum')

            avg_loss += loss.item()
            corrects += (torch.max(logit, 1)[1].view(target.size()) == target).sum().item()

    size = len(data_iter.dataset)
    avg_loss /= size
    accuracy = 100.0 * corrects / size
    print('\nEvaluation - loss: {:.6f}  acc: {:.3f}% ({}/{})\n'.format(avg_loss,
                                                                       accuracy,
                                                                       corrects,
                                                                       size))
    return accuracy


def predict(text, model, text_field, label_field, cuda_flag):
    """Classify a single raw string and return the predicted label."""
    assert isinstance(text, str)
    model.eval()
    text = text_field.preprocess(text)
    x = torch.tensor([[text_field.vocab.stoi[token] for token in text]])
    if cuda_flag:
        x = x.cuda()
    with torch.no_grad():
        output = model(x)
    _, predicted = torch.max(output, 1)
    # Undo the label shift applied during training (labels start at index 1).
    return label_field.vocab.itos[predicted.item() + 1]


def save(model, save_dir, save_prefix, steps):
    """Save the model's state dict to <save_dir>/<save_prefix>_steps_<steps>.pt."""
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, '{}_steps_{}.pt'.format(save_prefix, steps))
    torch.save(model.state_dict(), save_path)
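

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): a minimal, self-contained
# example of the interfaces train() and predict() expect. DummyModel,
# make_batch(), and the mocked text_field / label_field below are hypothetical
# stand-ins for the CNN model and the torchtext Field/Iterator objects the
# real training script would supply; only the attribute names they expose
# (.text, .label, .batch_size, .preprocess, .vocab.stoi, .vocab.itos) matter.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import argparse
    from collections import defaultdict
    from types import SimpleNamespace

    import torch.nn as nn

    class DummyModel(nn.Module):
        """Tiny embedding + linear classifier standing in for the real CNN."""

        def __init__(self, vocab_size=100, embed_dim=16, num_classes=2):
            super().__init__()
            self.embed = nn.Embedding(vocab_size, embed_dim)
            self.fc = nn.Linear(embed_dim, num_classes)

        def forward(self, x):  # x: (batch, seq_len) token ids
            return self.fc(self.embed(x).mean(dim=1))

    def make_batch(batch_size=4, seq_len=10, vocab_size=100, num_classes=2):
        """Mimic a torchtext batch: .text is (seq_len, batch) and labels are 1-indexed."""
        return SimpleNamespace(
            text=torch.randint(0, vocab_size, (seq_len, batch_size)),
            label=torch.randint(1, num_classes + 1, (batch_size,)),
            batch_size=batch_size)

    args = argparse.Namespace(
        lr=1e-3, epochs=1, cuda=False,
        log_interval=1, test_interval=1000, save_interval=1000,
        save_dir='snapshot')

    model = DummyModel()
    train_iter = [make_batch() for _ in range(5)]  # any iterable of batches works
    train(train_iter, dev_iter=[], model=model, args=args)
    print()  # end the '\r' progress line written by train()

    # Mocked Field objects, just enough to exercise predict(); unknown tokens map to id 0.
    text_field = SimpleNamespace(
        preprocess=lambda s: s.lower().split(),
        vocab=SimpleNamespace(stoi=defaultdict(int)))
    label_field = SimpleNamespace(
        vocab=SimpleNamespace(itos=['<unk>', 'negative', 'positive']))
    print(predict('a short example sentence', model, text_field, label_field, cuda_flag=False))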