author     Leonard Kugis <leonard@kug.is>    2023-01-11 23:52:17 +0100
committer  Leonard Kugis <leonard@kug.is>    2023-01-11 23:52:17 +0100
commit     9a36663414b96f30652ba5503753f7c16a7dcaa6 (patch)
tree       977c110f205dca3e4e9ddc17c6b134798ffe4dff
parent     416fa49bde99c69d6453976beb2a0b5c8db1e5a4 (diff)
Deleted backup slides
-rw-r--r--  Presentation/presentation.tex  102
-rw-r--r--  Presentation/transcript.md       4
2 files changed, 53 insertions(+), 53 deletions(-)
diff --git a/Presentation/presentation.tex b/Presentation/presentation.tex
index 6f6b970..4cdd193 100644
--- a/Presentation/presentation.tex
+++ b/Presentation/presentation.tex
@@ -66,17 +66,17 @@ backend=biber,
\begin{figure}[h]
\centering
\includegraphics[width=\textwidth, keepaspectratio]{resources/cnn}
- \caption{Deep Neural Network \cite{726791}}
+ \caption{Deep Neural Network (LeNet) \cite{726791}}
\end{figure}
\end{frame}
-\begin{frame}{Deep Neural Networks}
- \begin{figure}[h]
- \centering
- \includegraphics[width=\textwidth, keepaspectratio]{resources/fcn}
- \caption{Fully connected layer}
- \end{figure}
-\end{frame}
+% \begin{frame}{Deep Neural Networks}
+% \begin{figure}[h]
+% \centering
+% \includegraphics[width=\textwidth, keepaspectratio]{resources/fcn}
+% \caption{Fully connected layer}
+% \end{figure}
+% \end{frame}
\begin{frame}{Deep Neural Networks}
\begin{figure}[h]
@@ -140,20 +140,20 @@ backend=biber,
\tableofcontents[currentsection]
\end{frame}
-\begin{frame}{Data types}
- \begin{itemize}
- \item Dynamic
- \begin{itemize}
- \item Input data
- \item Output data
- \end{itemize}
- \item Static (parameters)
- \begin{itemize}
- \item Weights
- \item Parameters of activation functions
- \end{itemize}
- \end{itemize}
-\end{frame}
+% \begin{frame}{Data types}
+% \begin{itemize}
+% \item Dynamic
+% \begin{itemize}
+% \item Input data
+% \item Output data
+% \end{itemize}
+% \item Static (parameters)
+% \begin{itemize}
+% \item Weights
+% \item Parameters of activation functions
+% \end{itemize}
+% \end{itemize}
+% \end{frame}
\begin{frame}{AlexNet}
\begin{itemize}
@@ -240,14 +240,6 @@ backend=biber,
\end{itemize}
\end{frame}
-\begin{frame}{Weight quantization}
- \begin{figure}[h]
- \centering
- \includegraphics[width=0.8\textwidth, keepaspectratio]{resources/centroid_initialization}
- \caption{Different centroid initialization methods \cite{Han2015DeepCC}}
- \end{figure}
-\end{frame}
-
\begin{frame}{Huffman encoding}
\begin{figure}[h]
\centering
@@ -264,25 +256,6 @@ backend=biber,
\end{figure}
\end{frame}
-\begin{frame}{HashNets}
- \begin{minipage}{0.49\linewidth}
- \begin{figure}[h]
- \centering
- \includegraphics[width=\textwidth, keepaspectratio]{resources/hashnets}
- \end{figure}
- \end{minipage}
- \hfill
- \begin{minipage}{0.49\linewidth}
- \begin{itemize}
- \item Virtual weight matrix $\textbf{V}^{\ell}$
- \item One-way hash function $h^{\ell}(i, j)$
- \item Weight array $w^{\ell}$
- \item Hash function returns index for weight array
- \item $w^{\ell}_{h^{\ell}(i, j)} = \textbf{V}^{\ell}_{ij}$
- \end{itemize}
- \end{minipage}
-\end{frame}
-
\begin{frame}{Storage format}
\begin{itemize}
\item Compressed sparse column (CSC) /
@@ -400,9 +373,9 @@ backend=biber,
\hfill
\begin{minipage}{0.59\linewidth}
\begin{itemize}
- \item Receives column vector $v$, absolute destination accumulator register index $x$ and activation value $a_j$
+ \item Receives column vector $v$, relative index $z$ and activation value $a_j$
+ \item Calculates absolute destination accumulator register index $x$
\item Calculates $b_x = b_x + v \cdot a_j$
- \item Accumulates indices $x$ and forwards real target address
\end{itemize}
\end{minipage}
\end{frame}
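As a rough illustration of the PE operation described in the frame above, here is a minimal Python sketch of the accumulation $b_x = b_x + v \cdot a_j$. It assumes the relative index z counts the zeros skipped since the previously stored entry of the weight column; that encoding and the name pe_accumulate are assumptions for illustration, not taken from the slides.

    import numpy as np

    def pe_accumulate(b, col_vals, col_rel_idx, a_j):
        # For each stored nonzero v of the current weight column (with relative
        # row index z), recover the absolute destination accumulator register
        # index x and update b[x] += v * a_j.
        x = -1
        for v, z in zip(col_vals, col_rel_idx):
            x += z + 1          # absolute index = previous x + skipped zeros + 1
            b[x] += v * a_j     # b_x = b_x + v * a_j
        return b

    # Column (0, 0.5, 0, 0, -1.2)^T stored as values [0.5, -1.2], relative indices [1, 2]:
    b = pe_accumulate(np.zeros(5), [0.5, -1.2], [1, 2], a_j=2.0)  # -> b[1] = 1.0, b[4] = -2.4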
@@ -503,4 +476,31 @@ backend=biber,
End
\end{frame}
+\begin{frame}{Weight quantization}
+ \begin{figure}[h]
+ \centering
+ \includegraphics[width=0.8\textwidth, keepaspectratio]{resources/centroid_initialization}
+ \caption{Different centroid initialization methods \cite{Han2015DeepCC}}
+ \end{figure}
+\end{frame}
+
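For the centroid-initialization figure above, a minimal Python sketch of the three schemes compared in Han et al. (Deep Compression): random (Forgy), density-based, and linear. The function name and signature are illustrative assumptions, not from the slides.

    import numpy as np

    def init_centroids(weights, k, method="linear", rng=None):
        rng = np.random.default_rng() if rng is None else rng
        w = np.ravel(weights)
        if method == "random":    # Forgy: pick k existing weights at random
            return rng.choice(w, size=k, replace=False)
        if method == "density":   # equally spaced points on the CDF of the weights
            return np.quantile(w, np.linspace(0.0, 1.0, k))
        if method == "linear":    # equally spaced between min and max weight
            return np.linspace(w.min(), w.max(), k)
        raise ValueError(f"unknown method: {method}")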
+\begin{frame}{HashNets}
+ \begin{minipage}{0.49\linewidth}
+ \begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth, keepaspectratio]{resources/hashnets}
+ \end{figure}
+ \end{minipage}
+ \hfill
+ \begin{minipage}{0.49\linewidth}
+ \begin{itemize}
+ \item Virtual weight matrix $\textbf{V}^{\ell}$
+ \item One-way hash function $h^{\ell}(i, j)$
+ \item Weight array $w^{\ell}$
+ \item Hash function returns index for weight array
+ \item $w^{\ell}_{h^{\ell}(i, j)} = \textbf{V}^{\ell}_{ij}$
+ \end{itemize}
+ \end{minipage}
+\end{frame}
+
\end{document}
\ No newline at end of file
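To make the HashNets lookup $w^{\ell}_{h^{\ell}(i, j)} = \textbf{V}^{\ell}_{ij}$ from the frame above concrete, here is a minimal Python sketch of hash-based weight sharing. The hash choice (crc32 rather than the xxhash used in the HashNets paper) and the function names are assumptions for illustration.

    import zlib
    import numpy as np

    def hashed_weight(w, i, j, layer):
        # h^l(i, j): one-way hash mapping a virtual position (i, j) of layer l
        # to an index into the real (small) weight array w, so V[i, j] = w[h(i, j)].
        key = f"{layer}:{i}:{j}".encode()
        return w[zlib.crc32(key) % len(w)]

    def hashed_layer(a, w, n_out, layer):
        # Fully connected layer using the virtual weight matrix V^l implicitly
        # (no bias, no activation): out_i = sum_j V[i, j] * a[j].
        n_in = len(a)
        return np.array([sum(hashed_weight(w, i, j, layer) * a[j] for j in range(n_in))
                         for i in range(n_out)])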
diff --git a/Presentation/transcript.md b/Presentation/transcript.md
index 513656b..b2ba006 100644
--- a/Presentation/transcript.md
+++ b/Presentation/transcript.md
@@ -72,11 +72,11 @@
## Compression
-- Which different kinds of memory do we have in an accelerator?
+<!-- - Which different kinds of memory do we have in an accelerator?
- Dynamic: input data
- Static: weights, parameters for activation functions
-*next slide*
+*next slide* -->
## AlexNet