-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathHpAlgorithms.py
49 lines (36 loc) · 1.81 KB
/
HpAlgorithms.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
"""Base module for the default configuration of all employed machine learning algorithms"""
from sklearn.ensemble import AdaBoostClassifier, GradientBoostingClassifier,GradientBoostingRegressor,AdaBoostRegressor
from sklearn.tree import DecisionTreeClassifier,DecisionTreeRegressor
def getAdaBoostBDTClassifier(options=None):
    """Return the standard BDT classifier based on AdaBoost.

    Parameters
    ----------
    options : dict, optional
        Hyperparameter overrides applied to the booster via
        ``set_params`` (e.g. ``{'n_estimators': 400}``).

    Returns
    -------
    AdaBoostClassifier
        Unfitted estimator with the default configuration applied.
    """
    # Avoid a mutable default argument; None means "no overrides".
    if options is None:
        options = {}
    # Weak learner: shallow gini tree; min_samples_leaf=0.05 is a
    # fraction of the training sample, which limits overfitting.
    dt = DecisionTreeClassifier(criterion="gini",
                                max_depth=5,
                                min_samples_leaf=0.05,
                                random_state=0)
    bdt = AdaBoostClassifier(dt,
                             n_estimators=200,
                             learning_rate=0.13,
                             algorithm='SAMME',
                             random_state=0)
    # BUG FIX: the original called set_params(options={}), which both
    # discards the caller's options and raises ValueError (AdaBoost has
    # no parameter named 'options'). Unpack the dict as keyword args,
    # matching getGradientBDTClassifier below.
    bdt.set_params(**options)
    return bdt
def getGradientBDTClassifier(options={}):
    """Return the standard BDT classifier based on Gradient Boosting.

    Any entries in *options* override the defaults below via
    ``set_params`` before the unfitted estimator is returned.
    """
    # Default configuration, gathered in one place for readability.
    defaults = {
        'n_estimators': 120,
        'learning_rate': 0.13,
        'max_depth': 5,
        'min_weight_fraction_leaf': 0.01,
        'random_state': 0,
    }
    model = GradientBoostingClassifier(**defaults)
    model.set_params(**options)
    return model
def getAdaBoostBDTRegressor(options=None):
    """Return the standard BDT regressor based on AdaBoost.

    Parameters
    ----------
    options : dict, optional
        Hyperparameter overrides applied via ``set_params``.

    Returns
    -------
    AdaBoostRegressor
        Unfitted estimator with the default configuration applied.
    """
    # Avoid a mutable default argument; None means "no overrides".
    if options is None:
        options = {}
    # BUG FIX: the original used loss='ls', which is not a valid
    # AdaBoostRegressor loss (valid: 'linear', 'square', 'exponential')
    # and fails sklearn's parameter validation; 'linear' is the
    # least-squares-style default the author presumably intended.
    # random_state=0 added for reproducibility, consistent with the
    # other factory functions in this module.
    clf = AdaBoostRegressor(DecisionTreeRegressor(max_depth=5),
                            n_estimators=500,
                            learning_rate=0.13,
                            loss='linear',
                            random_state=0)
    # BUG FIX: the original accepted `options` but never applied it.
    clf.set_params(**options)
    return clf
def getGradientBDTRegressor(options=None):
    """Return the standard BDT regressor based on Gradient Boosting.

    Parameters
    ----------
    options : dict, optional
        Hyperparameter overrides applied via ``set_params``.

    Returns
    -------
    GradientBoostingRegressor
        Unfitted estimator with the default configuration applied.
    """
    # Avoid a mutable default argument; None means "no overrides".
    if options is None:
        options = {}
    # NOTE(review): 'ls' was renamed 'squared_error' in scikit-learn
    # 1.2; kept as-is since this file's sklearn version is unknown —
    # confirm against the pinned dependency.
    params = {'n_estimators': 20, 'max_depth': 5, 'min_samples_split': 2,
              'learning_rate': 0.13, 'loss': 'ls',
              # random_state pinned for reproducibility, consistent
              # with the other factory functions in this module.
              'random_state': 0}
    clf = GradientBoostingRegressor(**params)
    # BUG FIX: the original accepted `options` but never applied it.
    clf.set_params(**options)
    return clf