keras_simple.py
"""
Optuna example that optimizes a neural network classifier configuration for the
MNIST dataset using Keras.
In this example, we optimize the validation accuracy of MNIST classification using
Keras. We optimize the filter and kernel size, kernel stride and layer activation.
"""
import urllib.request

import optuna
from keras.backend import clear_session
from keras.datasets import mnist
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import Input
from keras.models import Sequential
from keras.optimizers import RMSprop


# TODO(crcrpar): Remove the below three lines once everything is ok.
# Register a global custom opener to avoid HTTP Error 403: Forbidden when downloading MNIST.
opener = urllib.request.build_opener()
opener.addheaders = [("User-agent", "Mozilla/5.0")]
urllib.request.install_opener(opener)

N_TRAIN_EXAMPLES = 3000
N_VALID_EXAMPLES = 1000
BATCHSIZE = 128
CLASSES = 10
EPOCHS = 10


def objective(trial):
    # Clear clutter from previous Keras session graphs.
    clear_session()

    (x_train, y_train), (x_valid, y_valid) = mnist.load_data()
    img_x, img_y = x_train.shape[1], x_train.shape[2]
    x_train = x_train.reshape(-1, img_x, img_y, 1)[:N_TRAIN_EXAMPLES].astype("float32") / 255
    x_valid = x_valid.reshape(-1, img_x, img_y, 1)[:N_VALID_EXAMPLES].astype("float32") / 255
    y_train = y_train[:N_TRAIN_EXAMPLES]
    y_valid = y_valid[:N_VALID_EXAMPLES]
    input_shape = (img_x, img_y, 1)

    model = Sequential()
    model.add(Input(shape=input_shape))
    model.add(
        Conv2D(
            filters=trial.suggest_categorical("filters", [32, 64]),
            kernel_size=trial.suggest_categorical("kernel_size", [3, 5]),
            strides=trial.suggest_categorical("strides", [1, 2]),
            activation=trial.suggest_categorical("activation", ["relu", "linear"]),
        )
    )
    model.add(Flatten())
    model.add(Dense(CLASSES, activation="softmax"))

    # We compile our model with a sampled learning rate.
    learning_rate = trial.suggest_float("learning_rate", 1e-5, 1e-1, log=True)
    model.compile(
        loss="sparse_categorical_crossentropy",
        optimizer=RMSprop(learning_rate=learning_rate),
        metrics=["accuracy"],
    )

    model.fit(
        x_train,
        y_train,
        validation_data=(x_valid, y_valid),
        shuffle=True,
        batch_size=BATCHSIZE,
        epochs=EPOCHS,
        verbose=False,
    )

    # Evaluate the model accuracy on the validation set.
    score = model.evaluate(x_valid, y_valid, verbose=0)
    return score[1]


if __name__ == "__main__":
    study = optuna.create_study(direction="maximize")
    study.optimize(objective, n_trials=100, timeout=600)

    print("Number of finished trials: {}".format(len(study.trials)))
    print("Best trial:")
    trial = study.best_trial

    print("  Value: {}".format(trial.value))
    print("  Params: ")
    for key, value in trial.params.items():
        print("    {}: {}".format(key, value))