@inproceedings{3b3552473a7c4d2f9edc370bcd904c41,
  title     = {Marginal Replay vs Conditional Replay for Continual Learning},
  abstract  = {We present a new replay-based method of continual classification learning that we term ``conditional replay'' which generates samples and labels together by sampling from a distribution conditioned on the class. We compare conditional replay to another replay-based continual learning paradigm (which we term ``marginal replay'') that generates samples independently of their class and assigns labels in a separate step. The main improvement in conditional replay is that labels for generated samples need not be inferred, which reduces the margin for error in complex continual classification learning tasks. We demonstrate the effectiveness of this approach using novel and standard benchmarks constructed from MNIST and FashionMNIST data, and compare to the regularization-based elastic weight consolidation (EWC) method [17, 34].},
  keywords  = {Continual learning, Generative models, Generative replay},
  author    = {Lesort, Timoth{\'e}e and Gepperth, Alexander and Stoian, Andrei and Filliat, David},
  editor    = {Tetko, Igor V. and Karpov, Pavel and Theis, Fabian and Kurkov{\'a}, Vera},
  booktitle = {Artificial Neural Networks and Machine Learning -- {ICANN} 2019},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  pages     = {466--480},
  year      = {2019},
  month     = jan,
  day       = {1},
  doi       = {10.1007/978-3-030-30484-3_38},
  isbn      = {9783030304836},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks: Workshop and Special Sessions, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
}