\begin{thebibliography}{10}
\bibitem{Aljundi2019OnlineCL}
R.~Aljundi, L.~Caccia, E.~Belilovsky, M.~Caccia, M.~Lin, L.~Charlin, and
  T.~Tuytelaars.
\newblock Online continual learning with maximally interfered retrieval.
\newblock {\em arXiv preprint arXiv:1908.04742}, 2019.
\bibitem{Aljundi2019GradientBS}
R.~Aljundi, M.~Lin, B.~Goujaud, and Y.~Bengio.
\newblock Gradient based sample selection for online continual learning.
\newblock In {\em Advances in Neural Information Processing Systems}, 2019.
\bibitem{chaudhry2018riemannian}
A.~Chaudhry, P.~K. Dokania, T.~Ajanthan, and P.~H.~S. Torr.
\newblock Riemannian walk for incremental learning: Understanding forgetting
  and intransigence.
\newblock In {\em Proceedings of the European Conference on Computer Vision
  (ECCV)}, pages 532--547, 2018.
\bibitem{kirkpatrick2017overcoming}
J.~Kirkpatrick, R.~Pascanu, N.~Rabinowitz, J.~Veness, G.~Desjardins, A.~A.
  Rusu, K.~Milan, J.~Quan, T.~Ramalho, A.~Grabska-Barwinska, et~al.
\newblock Overcoming catastrophic forgetting in neural networks.
\newblock {\em Proceedings of the National Academy of Sciences},
  114(13):3521--3526, 2017.
\bibitem{Knoblauch2020OptimalCL}
J.~Knoblauch, H.~Husain, and T.~Diethe.
\newblock Optimal continual learning has perfect memory and is NP-hard.
\newblock In {\em International Conference on Machine Learning}, 2020.
\bibitem{li2017learning}
Z.~Li and D.~Hoiem.
\newblock Learning without forgetting.
\newblock {\em IEEE Transactions on Pattern Analysis and Machine Intelligence},
  40(12):2935--2947, 2017.
\bibitem{Lomonaco2020RehearsalFreeCL}
V.~Lomonaco, D.~Maltoni, and L.~Pellegrini.
\newblock Rehearsal-free continual learning over small non-i.i.d. batches.
\newblock In {\em IEEE/CVF Conference on Computer Vision and Pattern
  Recognition Workshops (CVPRW)}, pages 989--998, 2020.
\bibitem{Lomonaco2020CVPR2C}
V.~Lomonaco, L.~Pellegrini, P.~Rodr{\'i}guez, M.~Caccia, Q.~She, Y.~Chen,
  Q.~Jodelet, R.~Wang, Z.~Mai, D.~V{\'a}zquez, G.~I. Parisi, N.~Churamani,
  M.~Pickett, I.~H. Laradji, and D.~Maltoni.
\newblock CVPR 2020 continual learning in computer vision competition:
  Approaches, results, current challenges and future directions.
\newblock {\em arXiv preprint arXiv:2009.09929}, 2020.
\bibitem{lopez2017gradient}
D.~Lopez-Paz and M.~Ranzato.
\newblock Gradient episodic memory for continual learning.
\newblock In {\em Advances in Neural Information Processing Systems},
  volume~30, pages 6467--6476, 2017.
\bibitem{Maltoni2019ContinuousLI}
D.~Maltoni and V.~Lomonaco.
\newblock Continuous learning in single-incremental-task scenarios.
\newblock {\em Neural Networks}, 116:56--73, 2019.
\bibitem{Pellegrini2020LatentRF}
L.~Pellegrini, G.~Graffieti, V.~Lomonaco, and D.~Maltoni.
\newblock Latent replay for real-time continual learning.
\newblock In {\em IEEE/RSJ International Conference on Intelligent Robots and
  Systems (IROS)}, pages 10203--10209, 2020.
\bibitem{Pelosin2021MoreIB}
F.~Pelosin and A.~Torsello.
\newblock More is better: An analysis of instance quantity/quality trade-off in
  rehearsal-based continual learning.
\newblock {\em arXiv preprint arXiv:2105.14106}, 2021.
\bibitem{Prabhu2020GDumbAS}
A.~Prabhu, P.~H.~S. Torr, and P.~Dokania.
\newblock GDumb: A simple approach that questions our progress in continual
  learning.
\newblock In {\em Proceedings of the European Conference on Computer Vision
  (ECCV)}, 2020.
\bibitem{rebuffi2017icarl}
S.-A. Rebuffi, A.~Kolesnikov, G.~Sperl, and C.~H. Lampert.
\newblock iCaRL: Incremental classifier and representation learning.
\newblock In {\em Proceedings of the IEEE Conference on Computer Vision and
  Pattern Recognition}, pages 2001--2010, 2017.
\bibitem{shin2017continual}
H.~Shin, J.~K. Lee, J.~Kim, and J.~Kim.
\newblock Continual learning with deep generative replay.
\newblock {\em arXiv preprint arXiv:1705.08690}, 2017.
\bibitem{zenke2017continual}
F.~Zenke, B.~Poole, and S.~Ganguli.
\newblock Continual learning through synaptic intelligence.
\newblock In {\em International Conference on Machine Learning}, pages
  3987--3995. PMLR, 2017.
\end{thebibliography}