%\title{\LARGE iCaRL: incremental Classifier and Representation Learning}
\title{Computer Vision and Machine Learning}
\author{~}
\institute{\vskip-.5\baselineskip\large Institute of Science and Technology (IST) Austria, 3400 Klosterneuburg, Austria}
%\institute{~}%Christoph Lampert} %\textsuperscript{1} ENS Rennes (Ecole Normale Sup\'{e}rieure de Rennes), Rennes, France \textsuperscript{2} IST Austria (Institute of Science and Technology Austria), Klosterneuburg, Austria}
% Ratcliff, R. (1990) Connectionist models of recognition memory: Constraints imposed by learning and forgetting functions. Psychological Review,97, 285-308
\item fixed representation: freeze the representation after the first batch of classes
\item finetuning: ordinary network training; finetune whenever new classes come in
\item LwF: \emph{"Learning without Forgetting"}{\scriptsize [Li, Hoiem. 2016]}, use the network outputs themselves to classify
%\item LwF+proto: like LwF, but with prototypes used for representation learning
\item iNCM: like iCaRL, but store all images and classify with the true class means (see the sketch below)
\end{itemize}
\end{block}
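\medskip
For concreteness, a minimal sketch of the mean-based classification rule shared by iCaRL and iNCM (NumPy; the feature extractor \texttt{phi} and all variable names are illustrative, not the actual implementation). iNCM averages the features of \emph{all} training images of a class, iCaRL only those of its stored exemplars:
\begin{verbatim}
import numpy as np

def class_means(phi, images_per_class):
    # images_per_class: class label -> array of images;
    # stored exemplars (iCaRL) or all training images (iNCM)
    means = {}
    for y, imgs in images_per_class.items():
        feats = phi(imgs)                                    # (n, d) features
        feats /= np.linalg.norm(feats, axis=1, keepdims=True)
        mu = feats.mean(axis=0)
        means[y] = mu / np.linalg.norm(mu)                   # normalised mean
    return means

def classify(phi, x, means):
    # nearest-mean rule: predict the class whose mean is
    # closest to the (normalised) feature vector of x
    f = phi(x[np.newaxis])[0]
    f /= np.linalg.norm(f)
    return min(means, key=lambda y: np.linalg.norm(f - means[y]))
\end{verbatim}
The two methods differ only in what is passed as \texttt{images\_per\_class}; the classification rule itself is identical.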
\vskip4\blockskip
\begin{block}{\Large 7) Summary}
\begin{itemize}
\item iCaRL learns incrementally with a fixed memory footprint (see the exemplar-budget sketch below)
\item much better results than the baselines, on par with the (intractable, since it stores all images) iNCM
\end{itemize}
\end{block}
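\medskip
The fixed memory footprint refers to a total budget of $K$ stored images that is redistributed over all classes seen so far. A minimal sketch of this bookkeeping (NumPy; a herding-style greedy selection in the spirit of the paper, but \texttt{phi} and all names are illustrative):
\begin{verbatim}
import numpy as np

def select_exemplars(phi, images, m):
    # herding-style greedy selection: repeatedly pick the image whose
    # inclusion keeps the exemplar mean closest to the true class mean
    feats = phi(images)
    feats /= np.linalg.norm(feats, axis=1, keepdims=True)
    mu = feats.mean(axis=0)
    chosen, total = [], np.zeros_like(mu)
    available = np.ones(len(feats), dtype=bool)
    for k in range(1, m + 1):
        gaps = np.linalg.norm(mu - (total + feats) / k, axis=1)
        gaps[~available] = np.inf           # pick without replacement
        i = int(np.argmin(gaps))
        chosen.append(i)
        available[i] = False
        total += feats[i]
    return images[chosen]

def rebalance(exemplar_sets, new_class_images, phi, K):
    # keep the total number of stored images fixed at K:
    # shrink every per-class set to m = K // t when a class is added
    m = K // (len(exemplar_sets) + 1)
    exemplar_sets = [P[:m] for P in exemplar_sets]
    exemplar_sets.append(select_exemplars(phi, new_class_images, m))
    return exemplar_sets
\end{verbatim}
Because each exemplar set is ordered by selection priority, shrinking it to its first $m$ elements is a simple truncation.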
\bigskip\hrule\medskip\tiny
%[Thrun \etal, "Learning one more thing", \dots]
[Ans, Rousset. \emph{"Neural networks with a self-refreshing memory: Knowledge transfer in sequential learning tasks without catastrophic forgetting"}, Connection Science 12(1), 2000]
[Hinton, Vinyals, Dean. \emph{"Distilling the knowledge in a neural network"}, NIPS Workshop on Deep Learning, 2014]
[Kuzborskij, Orabona, Caputo. \emph{"From N to N+1: Multiclass transfer incremental learning"}, CVPR 2013]
[Li, Hoiem. \emph{"Learning without forgetting"}, ECCV 2016]
[Mandziuk, Shastri. \emph{"Incremental class learning approach and its application to handwritten digit recognition"}, Information Sciences, 2002]
[McCloskey, Cohen. \emph{"Catastrophic interference in connectionist networks: The sequential learning problem"}, The Psychology of Learning and Motivation, 1989]
[Mensink, Verbeek, Perronnin, Csurka. \emph{"Distance-based image classification: Generalizing to new classes at near-zero cost"}, TPAMI 2013]
[Ristin, Guillaumin, Gall, van Gool. \emph{"Incremental learning of NCM forests for large-scale image classification"}, CVPR 2014]
%[Ans, "Sequential Learning in Distributed Neural Networks without Catastrophic Forgetting: A Single and Realistic Self-Refreshing Memory Can Do It", Neural Information Processing--Letters and Reviews. 2004]
%[He, Zhang, Ren, Sun. Deep residual learning for image recognition. CVPR 2016]
%[Rocco De Rosa, Thomas Mensink, and Barbara Caputo, "Online Open World Recognition"] arXiv