Commit 09af905b authored by Amelie Royer

modify Poster skeleton

parent 3d5fb1b6
@@ -71,9 +71,9 @@
\setbeamertemplate{itemize item}[default]
%\title{\LARGE iCaRL: incremental Classifier and Representation Learning}
\title{Incremental Classifier and Representation Learning}
\author{\large Sylvestre-Alvise Rebuffi$^{\dag,*}$, Alexander Kolesnikov$^{*}$, Christoph H. Lampert$^{*}$}
\institute{\vskip-.5\baselineskip\large $^{\dag}$ CentraleSup\'elec\qquad $^{*}$ IST Austria}
\title{Computer Vision and Machine Learning}
\author{}
\institute{\vskip-.5\baselineskip\large Institute of Science and Technology (IST) Austria, 3400 Klosterneuburg, Austria}
%\institute{~}%Christoph Lampert} %\textsuperscript{1} ENS Rennes (Ecole Normale Sup\'{e}rieure de Rennes), Rennes, France \textsuperscript{2} IST Austria (Institute of Science and Technology Austria), Klosterneuburg, Austria}
%\date[]{}
@@ -139,17 +139,19 @@
\vspace*{-1.5cm}
\ \ \begin{block}{\Large Abstract}
%\large
We introduce \bblue{iCaRL}, a method for simultaneously learning classifiers
and a feature representation from training data in which classes
occur incrementally.
iCaRL uses a \blue{nearest-mean-of-exemplars} classifier, \blue{herding for
adaptive exemplar selection} and \blue{distillation for representation learning
without catastrophic forgetting}.
%
Experiments on CIFAR and ILSVRC\,2012 show that iCaRL can learn incrementally over a long period of time where other methods quickly fail.
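% A minimal commented-out sketch of the nearest-mean-of-exemplars rule named in
% the abstract above, assuming a feature map $\varphi$, exemplar sets $P_y$, and
% classes $1,\dots,t$ observed so far (notation introduced here only for
% illustration):
% \[ y^* \leftarrow \argmin\limits_{y=1,\dots,t} \big\| \varphi(x) - \mu_y \big\|,
%    \qquad \mu_y = \frac{1}{|P_y|} \sum_{p \in P_y} \varphi(p) \]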
\begin{block}{\Large People}
\newcommand{\peopleheight}{6cm}
\begin{center}
\includegraphics[height=\peopleheight{}]{people/clampert.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/akolesnikov-new.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/nkonstan.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/apentina-new.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/bphuong.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/srebuffi.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/aroyer.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/gsperl.jpg} ~~
\includegraphics[height=\peopleheight{}]{people/azimin.jpg} ~~
\end{center}
\end{block}
\vskip-1cm
@@ -157,7 +159,7 @@ Experiments on CIFAR and ILSVRC\,2012 show that iCaRL can learn incrementally o
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%% First COlumn
\ \ \ \begin{column}{.49\textwidth}
\begin{block}{\Large 1) Motivation}
\begin{block}{\Large Multi-Task Learning}
\bigskip
Object categorization methods are trained to recognize \textbf{1000s of classes}:
@@ -188,8 +190,9 @@ $\rightarrow$ huge computational cost, all training data must be kept around {\c
Potential solution: \bblue{class-incremental learning}
\end{block}
\vskip4\blockskip
\begin{block}{\Large 3) Existing Approaches}
\begin{block}{\Large Conditional Risk}
\textbf{Fixed data representation:}
\begin{itemize}
\item retrain classifiers on data subset with biased regularization {\scriptsize [Kuzborskij \etal, 2013]}
@@ -207,7 +210,7 @@ Potential solution: \bblue{class-incremental learning}
\end{block}
\vskip4\blockskip
\begin{block}{\Large 4) iCaRL} % {\scriptsize [arXiv \dots]}}
\begin{block}{\Large iCaRL} % {\scriptsize [arXiv \dots]}}
We incrementally learn \blue{classifiers and features} with a fixed-size network.
%Notation:
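% Commented-out sketch of the herding-style exemplar selection referenced in the
% abstract, consistent with the truncated \argmin visible in the hunk context
% below; the feature map $\varphi$ and the completion past the truncation are
% assumed for illustration:
% \[ p^y_k \leftarrow \argmin\limits_{x\in X^y}
%    \Big\| \frac{1}{n_y}\sum_{i=1}^{n_y}\varphi(x_i)
%         - \frac{1}{k}\Big(\varphi(x) + \sum_{j=1}^{k-1}\varphi(p^y_j)\Big) \Big\| \]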
@@ -275,8 +278,10 @@ p^y_k \leftarrow\!\argmin\limits_{x\in X^y} \Big\| \frac{1}{n_y}\sum_{i=1}^{n_y}
\end{column}
%
\ \ \begin{column}{.495\textwidth}
\begin{block}{\Large 2) Class-Incremental Learning}
\begin{block}{\Large Multi-output Distillation}
\textbf{Situation:}
\begin{itemize}
\item classes appear sequentially (or in batches) % $c_1,c_2,\dots,c_T$
@@ -307,7 +312,7 @@ p^y_k \leftarrow\!\argmin\limits_{x\in X^y} \Big\| \frac{1}{n_y}\sum_{i=1}^{n_y}
\end{block}
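% Commented-out sketch of a combined classification + distillation loss of the
% kind the abstract alludes to, with sigmoid outputs $g_y$, previous-network
% targets $q^y_i$, and new classes $s,\dots,t$; the exact form written here is
% an assumption, not fixed by this skeleton:
% \[ \ell(\Theta) = -\!\!\sum_{(x_i,y_i)} \Big[ \sum_{y=s}^{t} \delta_{y=y_i}\log g_y(x_i) + \delta_{y\neq y_i}\log\big(1-g_y(x_i)\big)
%    \; + \; \sum_{y=1}^{s-1} q^y_i \log g_y(x_i) + \big(1-q^y_i\big)\log\big(1-g_y(x_i)\big) \Big] \]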
\vskip4\blockskip
\begin{block}{\Large 5) Experiments (excerpt)}
\begin{block}{\Large Flexible Fine-tuning}
\vskip4\blockskip
\mbox{
@@ -341,20 +346,7 @@ p^y_k \leftarrow\!\argmin\limits_{x\in X^y} \Big\| \frac{1}{n_y}\sum_{i=1}^{n_y}
\end{itemize}
\end{block}
\vskip4\blockskip
\begin{block}{\Large 6) Results (excerpt)}
\centerline{\qquad\qquad\qquad\textbf{CIFAR-100}\qquad\hfill\textbf{ImageNet ILSVRC~2012}\quad\ \ }
\centerline{\includegraphics[height=.3\textwidth]{cifar-cumul10-legend}\includegraphics[height=.3\textwidth]{imagenet-cumul10_top5}}
\medskip
\textbf{Discussion:}
\begin{itemize}
\item as expected: fixed representation and finetuning do not work well
\item iCaRL is able to keep good classification accuracy for many iterations
\item \emph{``Learning without Forgetting''} starts to forget earlier %(even with prototypes)
\item mean-of-exemplars on par with (intractable) iNCM
\end{itemize}
\end{block}
\vskip4\blockskip
\begin{block}{\Large 7) Summary}
@@ -364,6 +356,7 @@ p^y_k \leftarrow\!\argmin\limits_{x\in X^y} \Big\| \frac{1}{n_y}\sum_{i=1}^{n_y}
\end{itemize}
\end{block}
\bigskip\hrule\medskip\tiny
%[Thrun \etal, "Learning one more thing", \dots]