@InProceedings{10.1007/978-3-031-40725-3_9,
author="Emami, Seyedsaman
and Ruiz Pastor, Carlos
and Mart{\'i}nez-Mu{\~{n}}oz, Gonzalo",
editor="Garc{\'i}a Bringas, Pablo
and P{\'e}rez Garc{\'i}a, Hilde
and Mart{\'i}nez de Pis{\'o}n, Francisco Javier
and Mart{\'i}nez {\'A}lvarez, Francisco
and Troncoso Lora, Alicia
and Herrero, {\'A}lvaro
and Calvo Rolle, Jos{\'e} Luis
and Quinti{\'a}n, H{\'e}ctor
and Corchado, Emilio",
title="Multi-Task Gradient Boosting",
booktitle="Hybrid Artificial Intelligent Systems",
year="2023",
publisher="Springer Nature Switzerland",
address="Cham",
pages="97--107",
abstract="Gradient Boosting Machines (GBMs) have revealed outstanding proficiency in various machine learning applications, such as classification and regression. Gradient boosting builds a set of regression models in an iterative process, in which at each iteration a regressor model is trained to reduce a given loss on a given objective. This paper proposes an extension of gradient boosting that can handle multi-task problems, that is, problems in which the tasks share the attribute space but not necessarily the data distribution. The objective of the proposed algorithm is to split the GB process into two phases, one in which the base models learn the multiple interconnected tasks simultaneously, and a second one in which different models are built to optimize the loss function on each task. The proposed model shows a better overall performance with respect to models that learn the tasks independently and all tasks together in several multi-task regression and classification problems.",
isbn="978-3-031-40725-3"
}