@inbook{e8432e96a37d4ccc97824a4bd1bf1799,
title = "{Learn++.MT}: A new approach to incremental learning",
abstract = "An ensemble-of-classifiers based algorithm, Learn++, was recently introduced that is capable of incrementally learning new information from datasets that become available consecutively, even if the new data introduce additional classes that were not previously seen. The algorithm does not require access to previously used datasets, yet it largely retains the previously acquired knowledge. However, Learn++ suffers from an inherent ``out-voting'' problem when asked to learn new classes, which causes it to generate an unnecessarily large number of classifiers. This paper proposes a modified version of the algorithm, called Learn++.MT, that not only reduces the number of classifiers generated but also improves performance. The out-voting problem, the new algorithm, and its promising results on two benchmark datasets as well as on one real-world application are presented.",
author = "Michael Muhlbaier and Apostolos Topalis and Robi Polikar",
note = "Copyright 2020 Elsevier B.V., All rights reserved.",
year = "2004",
doi = "10.1007/978-3-540-25966-4_5",
language = "English (US)",
isbn = "3540221441",
series = "Lecture Notes in Computer Science",
publisher = "Springer-Verlag",
pages = "52--61",
editor = "Fabio Roli and Josef Kittler and Terry Windeatt",
booktitle = "Multiple Classifier Systems",
address = "Germany",
}