classifier.py
"""Classifier heads: simple two-layer MLP classifiers and gated-tanh
("paper") variants that sum the logits of two parallel branches."""
import torch.nn as nn
from torch.nn.utils.weight_norm import weight_norm

from fc import FCNet, GTH, get_act, get_norm


class SimpleClassifier(nn.Module):
    """Two-layer MLP head: Linear -> activation -> dropout -> Linear."""

    def __init__(self, in_dim, hid_dim, out_dim, norm, act, dropout=0.5):
        super(SimpleClassifier, self).__init__()
        norm_layer = get_norm(norm)
        act_layer = get_act(act)
        layers = [
            norm_layer(nn.Linear(in_dim, hid_dim), dim=None),
            act_layer(),
            nn.Dropout(dropout, inplace=False),
            norm_layer(nn.Linear(hid_dim, out_dim), dim=None)
        ]
        self.main = nn.Sequential(*layers)

    def forward(self, x):
        logits = self.main(x)
        return logits


class PaperClassifier(nn.Module):
    """Two parallel gated-tanh (GTH) branches over the same input, each
    projected to out_dim; the branch logits are summed."""

    def __init__(self, in_dim, hid_dim_1, hid_dim_2, out_dim, norm, act, dropout=0.5):
        super(PaperClassifier, self).__init__()
        no_norm = lambda x, dim: x
        if norm == 'weight':
            norm_layer = weight_norm
        elif norm == 'batch':
            # BatchNorm1d/LayerNorm take a feature size, not a module, so they
            # cannot wrap nn.Linear the way weight_norm does; apply them after it.
            norm_layer = lambda m, dim: nn.Sequential(m, nn.BatchNorm1d(m.out_features))
        elif norm == 'layer':
            norm_layer = lambda m, dim: nn.Sequential(m, nn.LayerNorm(m.out_features))
        elif norm == 'none':
            norm_layer = no_norm
        else:
            raise ValueError("Invalid normalization: %s" % norm)
        self.gated_tanh_1 = GTH(in_dim=in_dim, out_dim=hid_dim_1, dropout=dropout, norm=norm, act=act)
        self.gated_tanh_2 = GTH(in_dim=in_dim, out_dim=hid_dim_2, dropout=dropout, norm=norm, act=act)
        self.linear_1 = norm_layer(nn.Linear(hid_dim_1, out_dim), dim=None)
        self.linear_2 = norm_layer(nn.Linear(hid_dim_2, out_dim), dim=None)

    def forward(self, x):
        v_1 = self.gated_tanh_1(x)
        v_2 = self.gated_tanh_2(x)
        v_1 = self.linear_1(v_1)
        v_2 = self.linear_2(v_2)
        logits = v_1 + v_2
        return logits


class PaperClassifier1(nn.Module):
    """Variant of PaperClassifier that uses plain FCNet branches in place of
    the gated-tanh (GTH) units; branch logits are summed as above."""

    def __init__(self, in_dim, hid_dim_1, hid_dim_2, out_dim, norm, act, dropout=0.5):
        super(PaperClassifier1, self).__init__()
        no_norm = lambda x, dim: x
        if norm == 'weight':
            norm_layer = weight_norm
        elif norm == 'batch':
            # As in PaperClassifier, batch/layer norm is applied after the
            # linear layer rather than wrapping it.
            norm_layer = lambda m, dim: nn.Sequential(m, nn.BatchNorm1d(m.out_features))
        elif norm == 'layer':
            norm_layer = lambda m, dim: nn.Sequential(m, nn.LayerNorm(m.out_features))
        elif norm == 'none':
            norm_layer = no_norm
        else:
            raise ValueError("Invalid normalization: %s" % norm)
        self.gated_tanh_1 = FCNet([in_dim, hid_dim_1], dropout=dropout, norm=norm, act=act)
        self.gated_tanh_2 = FCNet([in_dim, hid_dim_2], dropout=dropout, norm=norm, act=act)
        self.linear_1 = norm_layer(nn.Linear(hid_dim_1, out_dim), dim=None)
        self.linear_2 = norm_layer(nn.Linear(hid_dim_2, out_dim), dim=None)

    def forward(self, x):
        v_1 = self.gated_tanh_1(x)
        v_2 = self.gated_tanh_2(x)
        v_1 = self.linear_1(v_1)
        v_2 = self.linear_2(v_2)
        logits = v_1 + v_2
        return logits


class ImageClassifier(nn.Module):
    """Identical in structure to SimpleClassifier: a two-layer MLP head,
    kept as a separate class for image-branch logits."""

    def __init__(self, in_dim, hid_dim, out_dim, norm, act, dropout=0.5):
        super(ImageClassifier, self).__init__()
        norm_layer = get_norm(norm)
        act_layer = get_act(act)
        layers = [
            norm_layer(nn.Linear(in_dim, hid_dim), dim=None),
            act_layer(),
            nn.Dropout(dropout, inplace=False),
            norm_layer(nn.Linear(hid_dim, out_dim), dim=None)
        ]
        self.main = nn.Sequential(*layers)

    def forward(self, x):
        logits = self.main(x)
        return logits
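

# Minimal usage sketch, not part of the original module. Assumptions: fc.py
# exposes FCNet/GTH/get_act/get_norm with the signatures used above,
# get_norm('weight') returns a wrapper taking (module, dim=...), and
# get_act accepts the string 'ReLU'. The dimensions below are illustrative,
# not taken from this repository.
if __name__ == "__main__":
    import torch

    x = torch.randn(8, 1024)  # batch of 8 pooled feature vectors
    heads = [
        SimpleClassifier(1024, 512, 10, norm='weight', act='ReLU'),
        PaperClassifier(1024, 512, 512, 10, norm='weight', act='ReLU'),
        PaperClassifier1(1024, 512, 512, 10, norm='weight', act='ReLU'),
        ImageClassifier(1024, 512, 10, norm='weight', act='ReLU'),
    ]
    for head in heads:
        # Every head maps (batch, in_dim) -> (batch, out_dim) logits.
        print(type(head).__name__, head(x).shape)  # each: torch.Size([8, 10])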