@inproceedings{d81bd0f4d18643b08f9aacb9be21f917,
  title     = {Adaptive {XGBoost} for Evolving Data Streams},
  abstract  = {Boosting is an ensemble method that combines base models in a sequential manner to achieve high predictive accuracy. A popular learning algorithm based on this ensemble method is eXtreme Gradient Boosting (XGB). We present an adaptation of XGB for classification of evolving data streams. In this setting, new data arrives over time and the relationship between the class and the features may change in the process, thus exhibiting concept drift. The proposed method creates new members of the ensemble from mini-batches of data as new data becomes available. The maximum ensemble size is fixed, but learning does not stop when this size is reached because the ensemble is updated on new data to ensure consistency with the current concept. We also explore the use of concept drift detection to trigger a mechanism to update the ensemble. We test our method on real and synthetic data with concept drift and compare it against batch-incremental and instance-incremental classification methods for data streams.},
  keywords  = {Boosting, Classification, Ensembles, Stream Learning},
  author    = {Montiel, Jacob and Mitchell, Rory and Frank, Eibe and Pfahringer, Bernhard and Abdessalem, Talel and Bifet, Albert},
  note      = {Publisher Copyright: {\textcopyright} 2020 IEEE.; 2020 International Joint Conference on Neural Networks, IJCNN 2020 ; Conference date: 19-07-2020 Through 24-07-2020},
  year      = {2020},
  month     = jul,
  day       = {1},
  doi       = {10.1109/IJCNN48605.2020.9207555},
  language  = {English},
  series    = {Proceedings of the International Joint Conference on Neural Networks},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  booktitle = {2020 International Joint Conference on Neural Networks, {IJCNN} 2020 - Proceedings},
}