# models.py
## Define the convolutional neural network architecture
import torch
import torch.nn as nn
import torch.nn.functional as F
# can use the below import should you choose to initialize the weights of your Net
import torch.nn.init as I


class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        # Convolutional layers
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=32, kernel_size=5)
        self.conv2 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3)
        self.conv3 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3)
        self.conv4 = nn.Conv2d(in_channels=128, out_channels=256, kernel_size=2)
        # Max-pooling layer, shared by all convolutional blocks
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        # Fully connected layers
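        # Flattened-size derivation (a sketch: the file itself does not state
        # the input size; 1x224x224 grayscale crops are assumed, as in the
        # facial keypoints project this model appears to target):
        #   conv1 (k=5): 224 -> 220, pool -> 110  (32 channels)
        #   conv2 (k=3): 110 -> 108, pool -> 54   (64 channels)
        #   conv3 (k=3):  54 -> 52,  pool -> 26   (128 channels)
        #   conv4 (k=2):  26 -> 25,  pool -> 12   (256 channels)
        #   flattened size: 256 * 12 * 12 = 36864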
        self.fc1 = nn.Linear(in_features=36864, out_features=1000)  # input size confirmed by the "Flatten size" print in forward()
        self.fc2 = nn.Linear(in_features=1000, out_features=1000)
        self.fc3 = nn.Linear(in_features=1000, out_features=136)  # 136 outputs: 2 values (x, y) for each of the 68 keypoints
        # Dropout layers, with probability increasing through the network
        self.drop1 = nn.Dropout(p=0.1)
        self.drop2 = nn.Dropout(p=0.2)
        self.drop3 = nn.Dropout(p=0.3)
        self.drop4 = nn.Dropout(p=0.4)
        self.drop5 = nn.Dropout(p=0.5)
        self.drop6 = nn.Dropout(p=0.6)
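        # Optional weight initialization (a sketch, not in the original file):
        # the torch.nn.init import above suggests this hook. For example,
        # Xavier-uniform weights and zero biases for conv and linear layers:
        # for m in self.modules():
        #     if isinstance(m, (nn.Conv2d, nn.Linear)):
        #         I.xavier_uniform_(m.weight)
        #         I.zeros_(m.bias)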
    def forward(self, x):
        # First block - convolution + activation + pooling + dropout
        x = self.conv1(x)
        x = F.relu(x)
        x = self.pool(x)
        x = self.drop1(x)
        #print("First size: ", x.shape)
        # Second block - convolution + activation + pooling + dropout
        x = self.drop2(self.pool(F.relu(self.conv2(x))))
        #print("Second size: ", x.shape)
        # Third block - convolution + activation + pooling + dropout
        x = self.drop3(self.pool(F.relu(self.conv3(x))))
        #print("Third size: ", x.shape)
        # Fourth block - convolution + activation + pooling + dropout
        x = self.drop4(self.pool(F.relu(self.conv4(x))))
        #print("Fourth size: ", x.shape)
        # Flatten the feature maps into a single vector per sample
        x = x.view(x.size(0), -1)
        #print("Flatten size: ", x.shape)
        # First dense block - linear + activation + dropout
        x = self.drop5(F.relu(self.fc1(x)))
        #print("First dense size: ", x.shape)
        # Second dense block - linear + activation + dropout
        x = self.drop6(F.relu(self.fc2(x)))
        #print("Second dense size: ", x.shape)
        # Final dense layer - raw keypoint coordinates, no activation
        x = self.fc3(x)
        #print("Final dense size: ", x.shape)
        return x
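

# A minimal smoke test (a sketch, not part of the original file), assuming
# 1x224x224 grayscale input. It checks that the flattened size matches the
# 36864 expected by fc1 and that the output has shape (batch, 136).
if __name__ == "__main__":
    net = Net()
    net.eval()  # disable dropout so the check is deterministic
    with torch.no_grad():
        dummy = torch.randn(1, 1, 224, 224)  # (batch, channels, height, width)
        out = net(dummy)
    print(out.shape)  # expected: torch.Size([1, 136])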