-
Notifications
You must be signed in to change notification settings - Fork 42
/
multiple_scores.py
51 lines (38 loc) · 1.52 KB
/
multiple_scores.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import time
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.datasets import load_diabetes
from hyperactive import Hyperactive
# Load the diabetes regression dataset: X = feature matrix, y = targets.
data = load_diabetes()
X, y = data.data, data.target
"""
Hyperactive cannot handle multi objective optimization.
But we can achive something similar with a workaround.
The following example searches for the highest cv-score and the lowest training time.
It is possible by creating an objective/score from those two variables.
You can also return additional parameters to track the cv-score and training time separately.
"""
def model(opt):
    """Composite objective: cross-validation score per second of training time.

    Trains a gradient-boosting regressor with the hyperparameters sampled by
    the optimizer, then combines accuracy and speed into one scalar so the
    search favors models that are both accurate and fast to train.

    Parameters
    ----------
    opt : mapping with keys "n_estimators", "max_depth", "min_samples_split".

    Returns
    -------
    tuple of (float, dict)
        The composite score, plus a dict of the raw metrics so Hyperactive
        can track training time and cv-score separately in the results.
    """
    gbr = GradientBoostingRegressor(
        n_estimators=opt["n_estimators"],
        max_depth=opt["max_depth"],
        min_samples_split=opt["min_samples_split"],
    )

    # perf_counter is monotonic and high-resolution; time.time() is wall-clock
    # and can jump (e.g. NTP adjustments), which would corrupt the duration.
    c_time = time.perf_counter()
    scores = cross_val_score(gbr, X, y, cv=3)
    train_time = time.perf_counter() - c_time
    cv_score = scores.mean()

    # Compose the two objectives into a single score to maximize.
    score = cv_score / train_time

    # Returning score + dict lets Hyperactive log the extra metrics per iteration.
    return score, {"training_time": train_time, "cv_score": cv_score}
# Discrete grid of hyperparameter values the optimizer samples from.
search_space = {
    "n_estimators": [*range(10, 150, 5)],
    "max_depth": [*range(2, 12)],
    "min_samples_split": [*range(2, 22)],
}

# Run a single 20-iteration search over the composite objective.
hyper = Hyperactive()
hyper.add_search(model, search_space, n_iter=20)
hyper.run()

# The extra values returned in the dict show up as columns in the results.
print("\n Results \n", hyper.search_data(model))