# models.py
import sys
sys.path.append('../../')

import torch
from torch import nn
import torch.nn.functional as F

import torch_ard as nn_ard


class DenseModelARD(nn.Module):
    """Two-layer fully connected network built from LinearARD layers."""

    def __init__(self, input_shape, output_shape, hidden_size=150, activation=None):
        super(DenseModelARD, self).__init__()
        self.l1 = nn_ard.LinearARD(input_shape, hidden_size)
        self.l2 = nn_ard.LinearARD(hidden_size, output_shape)
        self.activation = activation
        self._init_weights()

    def forward(self, input):
        x = input.to(self.device)
        x = self.l1(x)
        x = torch.tanh(x)
        x = self.l2(x)
        if self.activation:
            x = self.activation(x)
        return x

    def _init_weights(self):
        # Xavier-initialize every child layer that exposes a weight tensor.
        for layer in self.children():
            if hasattr(layer, 'weight'):
                nn.init.xavier_uniform_(layer.weight, gain=nn.init.calculate_gain('relu'))

    @property
    def device(self):
        return next(self.parameters()).device


class DenseModel(nn.Module):
    """Deterministic counterpart of DenseModelARD using ordinary nn.Linear layers."""

    def __init__(self, input_shape, output_shape, hidden_size=150, activation=None):
        super(DenseModel, self).__init__()
        self.l1 = nn.Linear(input_shape, hidden_size)
        self.l2 = nn.Linear(hidden_size, output_shape)
        self.activation = activation
        self._init_weights()

    def forward(self, input):
        x = input.to(self.device)
        x = self.l1(x)
        x = torch.tanh(x)
        x = self.l2(x)
        if self.activation:
            x = self.activation(x)
        return x

    def _init_weights(self):
        for layer in self.children():
            if hasattr(layer, 'weight'):
                nn.init.xavier_uniform_(layer.weight, gain=nn.init.calculate_gain('relu'))

    @property
    def device(self):
        return next(self.parameters()).device


class LeNet(nn.Module):
    """LeNet-style CNN for 32x32 inputs: two conv/pool blocks followed by two linear layers."""

    def __init__(self, input_shape, output_shape):
        super(LeNet, self).__init__()
        self.conv1 = nn.Conv2d(input_shape, 20, 5)
        self.conv2 = nn.Conv2d(20, 50, 5)
        self.l1 = nn.Linear(50 * 5 * 5, 500)
        self.l2 = nn.Linear(500, output_shape)
        self._init_weights()

    def forward(self, x):
        out = F.relu(self.conv1(x.to(self.device)))
        out = F.max_pool2d(out, 2)
        out = F.relu(self.conv2(out))
        out = F.max_pool2d(out, 2)
        out = out.view(out.shape[0], -1)
        out = F.relu(self.l1(out))
        return self.l2(out)
        # return F.log_softmax(self.l2(out), dim=1)

    def _init_weights(self):
        for layer in self.children():
            if hasattr(layer, 'weight'):
                nn.init.xavier_uniform_(layer.weight, gain=nn.init.calculate_gain('relu'))

    @property
    def device(self):
        return next(self.parameters()).device


class LeNetARD(nn.Module):
    """LeNet variant whose conv and linear layers are replaced by their ARD versions."""

    def __init__(self, input_shape, output_shape):
        super(LeNetARD, self).__init__()
        self.conv1 = nn_ard.Conv2dARD(input_shape, 20, 5)
        self.conv2 = nn_ard.Conv2dARD(20, 50, 5)
        self.l1 = nn_ard.LinearARD(50 * 5 * 5, 500)
        self.l2 = nn_ard.LinearARD(500, output_shape)
        self._init_weights()

    def forward(self, input):
        out = F.relu(self.conv1(input.to(self.device)))
        out = F.max_pool2d(out, 2)
        out = F.relu(self.conv2(out))
        out = F.max_pool2d(out, 2)
        out = out.view(out.shape[0], -1)
        out = F.relu(self.l1(out))
        return self.l2(out)
        # return F.log_softmax(self.l2(out), dim=1)

    def _init_weights(self):
        for layer in self.children():
            if hasattr(layer, 'weight'):
                nn.init.xavier_uniform_(layer.weight, gain=nn.init.calculate_gain('relu'))

    @property
    def device(self):
        return next(self.parameters()).device


class LeNet_MNIST(LeNet):
    """LeNet with the first linear layer resized for 28x28 MNIST inputs (50*4*4 features)."""

    def __init__(self, input_shape, output_shape):
        super(LeNet_MNIST, self).__init__(input_shape, output_shape)
        self.l1 = nn.Linear(50 * 4 * 4, 500)
        super(LeNet_MNIST, self)._init_weights()


class LeNetARD_MNIST(LeNetARD):
    """ARD LeNet with the first linear layer resized for 28x28 MNIST inputs."""

    def __init__(self, input_shape, output_shape):
        super(LeNetARD_MNIST, self).__init__(input_shape, output_shape)
        self.l1 = nn_ard.LinearARD(50 * 4 * 4, 500)
        super(LeNetARD_MNIST, self)._init_weights()
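

# Minimal usage sketch (not part of the original file): instantiate the models defined
# above and run a forward pass on dummy data. The shapes below are assumptions for
# MNIST-style input (1-channel 28x28 images, 10 classes), not something fixed by this
# module itself.
if __name__ == '__main__':
    model = LeNetARD_MNIST(input_shape=1, output_shape=10)
    dummy = torch.randn(4, 1, 28, 28)        # batch of 4 fake grayscale images
    logits = model(dummy)
    print(logits.shape)                      # expected: torch.Size([4, 10])

    dense = DenseModelARD(input_shape=784, output_shape=10)
    flat = dummy.view(4, -1)                 # flatten images to 784-dim vectors
    print(dense(flat).shape)                 # expected: torch.Size([4, 10])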