Alexnet.py
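"""
AlexNet-style convolutional network in Keras for classifying 28x28 grayscale
digit images into 10 classes. The script reads train.csv / test.csv (label in
the first column of train.csv, followed by 784 pixel values per row -- the
layout used by e.g. the Kaggle digit-recognizer competition), loads pretrained
weights, and writes the predicted labels to a submission-style CSV.
"""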
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, Flatten, Conv2D, MaxPooling2D, BatchNormalization
import numpy as np
import pandas as pd
from sklearn import preprocessing


class Alexnet:
    def __init__(self):
        np.random.seed(1000)
        self.model = self.create_model()
        self.compile_model()

    def create_model(self):
        model = Sequential()
        # 1st Convolutional Layer
        model.add(Conv2D(filters=96, input_shape=(28, 28, 1), kernel_size=(11, 11), strides=(1, 1), padding='valid'))
        model.add(Activation('relu'))
        # Pooling
        # model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
        # Batch Normalisation before passing it to the next layer
        model.add(BatchNormalization())
        # 2nd Convolutional Layer
        model.add(Conv2D(filters=256, kernel_size=(11, 11), strides=(1, 1), padding='valid'))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=(2, 2), strides=(1, 1), padding='valid'))
        model.add(BatchNormalization())
        # 3rd Convolutional Layer
        model.add(Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
        model.add(Activation('relu'))
        model.add(BatchNormalization())
        # 4th Convolutional Layer
        model.add(Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
        model.add(Activation('relu'))
        model.add(BatchNormalization())
        # 5th Convolutional Layer
        model.add(Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
        model.add(Activation('relu'))
        model.add(BatchNormalization())
        model.add(Flatten())
        # 1st Dense Layer
        model.add(Dense(4096))
        model.add(Activation('relu'))
        # Add Dropout to prevent overfitting
        model.add(Dropout(0.4))
        model.add(BatchNormalization())
        # 2nd Dense Layer
        model.add(Dense(4096))
        model.add(Activation('relu'))
        model.add(Dropout(0.4))
        model.add(BatchNormalization())
        # 3rd Dense Layer
        model.add(Dense(1000))
        model.add(Activation('relu'))
        model.add(Dropout(0.4))
        model.add(BatchNormalization())
        # Output Layer: 10 classes with softmax probabilities
        model.add(Dense(10))
        model.add(Activation('softmax'))
        model.summary()
        return model

    def compile_model(self):
        self.model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    def train(self, X_train, Y_train):
        self.model.fit(X_train, Y_train, batch_size=64, epochs=1, verbose=1,
                       validation_split=0.2, shuffle=True)

    def predict(self, X_test):
        # predict_classes() has been removed from recent Keras releases; take the
        # argmax over the softmax output to recover class labels instead.
        return np.argmax(self.model.predict(X_test, verbose=1), axis=1)

    def save(self):
        self.model.save_weights("weights/alexnet-weights9.h5")

    def load(self):
        self.model.load_weights("weights/alexnet-weights.h5")

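# train.csv: one row per image, first column is the digit label, remaining 784
# columns are pixel intensities (0-255). test.csv has the same layout without
# the label column.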
def read_data():
    train = pd.read_csv("train.csv").values
    test = pd.read_csv("test.csv").values
    return train, test

if __name__ == "__main__":
    train, test = read_data()
    trainX = train[:, 1:].reshape(train.shape[0], 28, 28, 1).astype('float32')
    X_train = trainX / 255.0
    Y_train = train[:, 0]
    # One-hot encode the labels for categorical_crossentropy
    lb = preprocessing.LabelBinarizer()
    Y_train = lb.fit_transform(Y_train)
    # Scale the test images the same way as the training images
    X_test = test.reshape(test.shape[0], 28, 28, 1).astype('float32') / 255.0

    alexnet = Alexnet()
    # alexnet.train(X_train, Y_train)
    # alexnet.save()
    alexnet.load()
    preds = alexnet.predict(X_test)
    pd.DataFrame({"ImageId": list(range(1, len(preds) + 1)), "Label": preds}) \
        .to_csv("predicts.csv", index=False, header=True)