From 072d26e766931007a0f243674f7dfdff5c3104e9 Mon Sep 17 00:00:00 2001 From: Thomas Mesnard Date: Mon, 28 Dec 2015 20:51:50 +0100 Subject: Add plot More TIMIT ; log domain TIMIT: more complexity Nice poster Beautify code (mostly, add comments) Add final stuff. --- Report/ENSCachan.png | Bin 0 -> 71589 bytes Report/Makefile | 5 + Report/Report.bib | 8 + Report/Report.tex | 459 +++++++ Report/azerty1.png | Bin 0 -> 51063 bytes Report/azerty1.svg | 888 ++++++++++++++ Report/azerty2.png | Bin 0 -> 23320 bytes Report/azerty2.svg | 524 ++++++++ Report/azerty3.png | Bin 0 -> 89245 bytes Report/azerty3.svg | 989 ++++++++++++++++ Report/azerty4.png | Bin 0 -> 60795 bytes Report/azerty4.svg | 1968 +++++++++++++++++++++++++++++++ Report/ctc_cost_best.png | Bin 0 -> 47726 bytes Report/ens2.jpg | Bin 0 -> 87054 bytes Report/final/Projet_Auvolat_Mesnard.zip | Bin 0 -> 15745 bytes Report/les_resultats.py | 27 + Report/mila.png | Bin 0 -> 2614 bytes Report/pouet_timit_ER.png | Bin 0 -> 24143 bytes Report/pouet_timit_cost.png | Bin 0 -> 36683 bytes 19 files changed, 4868 insertions(+) create mode 100644 Report/ENSCachan.png create mode 100644 Report/Makefile create mode 100644 Report/Report.bib create mode 100644 Report/Report.tex create mode 100644 Report/azerty1.png create mode 100644 Report/azerty1.svg create mode 100644 Report/azerty2.png create mode 100644 Report/azerty2.svg create mode 100644 Report/azerty3.png create mode 100644 Report/azerty3.svg create mode 100644 Report/azerty4.png create mode 100644 Report/azerty4.svg create mode 100644 Report/ctc_cost_best.png create mode 100755 Report/ens2.jpg create mode 100644 Report/final/Projet_Auvolat_Mesnard.zip create mode 100644 Report/les_resultats.py create mode 100644 Report/mila.png create mode 100644 Report/pouet_timit_ER.png create mode 100644 Report/pouet_timit_cost.png
diff --git a/Report/ENSCachan.png b/Report/ENSCachan.png new file mode 100644 index 0000000..104b000 Binary files /dev/null and b/Report/ENSCachan.png differ
diff --git a/Report/Makefile b/Report/Makefile new file mode 100644 index 0000000..22e9687 --- /dev/null +++ b/Report/Makefile @@ -0,0 +1,5 @@ +Report.pdf: Report.tex + pdflatex -interaction nonstopmode Report.tex + bibtex Report + pdflatex -interaction nonstopmode Report.tex + pdflatex -interaction nonstopmode Report.tex
diff --git a/Report/Report.bib b/Report/Report.bib new file mode 100644 index 0000000..65c0e6c --- /dev/null +++ b/Report/Report.bib @@ -0,0 +1,8 @@ +@inproceedings{graves2006connectionist, + title={Connectionist temporal classification: labelling unsegmented sequence data with recurrent neural networks}, + author={Graves, Alex and Fern{\'a}ndez, Santiago and Gomez, Faustino and Schmidhuber, J{\"u}rgen}, + booktitle={Proceedings of the 23rd international conference on Machine learning}, + pages={369--376}, + year={2006}, + organization={ACM} +}
diff --git a/Report/Report.tex b/Report/Report.tex new file mode 100644 index 0000000..963089e --- /dev/null +++ b/Report/Report.tex @@ -0,0 +1,459 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +\documentclass[11pt,a4paper]{article} +% XXX Use this line if you want two columns +% \documentclass[10pt,a4paper,twocolumn]{article} +% This first part of the file is called the PREAMBLE. It includes +% customizations and command definitions. The preamble is everything +% between \documentclass and \begin{document}.
+ +%% Well understand input, and nice output +\usepackage[utf8]{inputenc} +\usepackage{lmodern} +\usepackage[T1]{fontenc} + +\usepackage[margin=1.00in,top=1.25in,bottom=1.25in]{geometry} % set the margins +\usepackage{latexsym} % symbols +\usepackage{amsmath} % great math stuff +\usepackage{amssymb} % great math symbols +\usepackage{amsfonts} % for blackboard bold, etc + +\usepackage{titling} +\usepackage{enumerate} +\usepackage{fixltx2e} % LaTeX patches, \textsubscript +\usepackage{cmap} % fix search and cut-and-paste in Acrobat +\usepackage{color} +\definecolor{darkgreen}{rgb}{0.0,0.55,0.0} % Defines the color used for link + + +\usepackage[plainpages=false,pdfcenterwindow=true, + pdftoolbar=false,pdfmenubar=false, + pdftitle={TITLE}, + pdfauthor={AUTHOR}, + linkcolor=black,citecolor=black,filecolor=black,urlcolor=black]{hyperref} + % Allows url with \href{http://truc.machin/ok?r=yes}{myref} +\usepackage{graphicx} % to include figures +\graphicspath{{fig/}} % Directory in which figures are stored +\usepackage{caption} +\usepackage{subcaption} +\usepackage{multicol} +\usepackage{float} +\usepackage{palatino} % Use the Palatino font % XXX +\usepackage{graphics} +\usepackage{indentfirst} +\usepackage{graphicx} + +% various theorems, numbered by section +\newtheorem{thm}{Theorem}[section] +\newtheorem{lem}[thm]{Lemma} +\newtheorem{proof}[thm]{Proof} + +\usepackage{lastpage,fancyhdr} +\pagestyle{fancyplain} + \renewcommand{\headrulewidth}{0.2pt} + \renewcommand{\footrulewidth}{0.2pt} + \lhead{\emph{Connectionist Temporal Classification}} + %\rhead{\emph{\today}} + \rhead{\emph{January 13, 2016}} + \lfoot{\textsc{\'Ecole Normale Sup\'erieure de Paris}} + \cfoot{\thepage/\pageref{LastPage}} + \rfoot{\textsc{Alex Auvolat, Thomas Mesnard}} + +%% Horizontal Lines +\providecommand*{\hr}[1][class-arg]{% + \hspace*{\fill}\hrulefill\hspace*{\fill} + \vskip 0.65\baselineskip +} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +\begin{document} +\DeclareGraphicsExtensions{.pdf,.png,.jpg} + +\pretitle{ + \begin{center} + \vspace{-5em} + \includegraphics[width=3cm]{ens2.jpg}\hfill + \includegraphics[width=3cm]{ENSCachan.png}\hfill + \includegraphics[width=3cm]{mila.png}\\\vspace{3em} +} +\posttitle{\end{center}} +% +%------------------------------------------------------------------------------ +\title{\huge Connectionist Temporal Classification: \\Labelling Unsegmented Sequences with \\Recurrent Neural Networks \\ \Large{Research Project Report -- Probabilistic Graphical Models course} } +% \date{\today} +\date{} + +\maketitle +% ------------------------------------------------------------------------------ +% \vspace*{-1.5cm} +\begin{center} +% Authors + \begin{multicols}{2} + \setcounter{footnote}{-1} %% XXX + \textsc{Alex Auvolat} \\ + Department of Computer Science\\ + \'Ecole Normale Sup\'erieure de Paris \\ + \texttt{alex.auvolat@ens.fr}\\ + \textsc{Thomas Mesnard} + \footnote{If needed, see online at \url{https://github.com/thomasmesnard/CTC-LSTM} for the implementation, open-sourced under the Apache license.}\\ + Department of Computer Science\\ + \'Ecole Normale Sup\'erieure de Paris \\ + \texttt{thomas.mesnard@ens.fr}\\ + % \and % \and or \And or \AND + \end{multicols} + %\vspace*{-0.3cm} + % \'Ecole Normale Sup\'erieure de Paris \\ + % \texttt{\{alex.auvolat,thomas.mesnard\}@ens.fr} +\end{center} + +\vspace{1em} + +% ------------------------------------------------------------------------------ +\hr{} % Horizontal line, like
in HTML +%\hfill{} + +\vspace{1em} + +% ------------------------------------------------------------------------------ +% A small abstract of what is done in the paper +\begin{abstract} + Many real-world sequence learning tasks require the prediction of sequences of + labels from noisy, unsegmented input data. Recurrent neural networks (RNNs) are powerful sequence learners that would seem well suited to such tasks. However, because they require pre-segmented training data, and post-processing to transform their outputs into label sequences, they cannot be applied directly. Connectionist Temporal Classification is a method for training RNNs to label unsegmented sequences directly, thereby solving both problems. +\end{abstract} + +\vspace{1em} + +% ------------------------------------------------------------------------------ +\hr{} % Horizontal line, like
in HTML +% XXX remove if it is ugly or take too much space + +%\hfill{} + +\vspace{1em} + +% ------------------------------------------------------------------------------ +% About the report +\begin{center} + \textbf{Project Advisor:} Guillaume Obozinski \\ + \textbf{Course:} + \emph{``Probabilistic Graphical Models''}, by F. Bach, S. Lacoste-Julien, G. Obozinski \\ + \textbf{For} the \href{http://www.math.ens-cachan.fr/version-anglaise/academics/mva-master-degree-227777.kjsp}{Math\'ematiques, Vision, Apprentissage (MVA)} Master 2 + at \href{http://www.ens-cachan.fr/}{ENS de Cachan}. +\end{center} + +%\footnote{If needed, see online at \href{https://github.com/thomasmesnard/CTC-LSTM}{https://github.com/thomasmesnard/CTC-LSTM}for an e-version of this report, as well as additional resources, open-sourced under the \href{http://opensource.org/licenses/MIT}{MIT License}} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +\newpage + +\section{Model} + +Connectionist Temporal Classification (CTC) introduces a new cost function for training recurrent neural networks to label unsegmented sequences directly. +To use this cost function, we introduce an additional blank symbol in the possible labels that the recurrent neural networks can output. We remind that the output layer of the recurrent neural networks correspond to probabilities over all possible labels. + +This additional blank symbol give freedom to the RNN to give the label for a section of input at any moment, and especially when it is sure of its answer, simply by outputting the blank label the rest of the time. + +The blank label also allows the network to give a strong probability to the correct label at a very localized point in time, whereas in a classical setup we observe distributed probabilities over all labels at each time step. An example of both behaviors can be observed on speech data in Figure \ref{Example}. + +\begin{figure}[h] + \centering + \includegraphics[width=1\linewidth]{azerty3.png} + \caption{\small Output of classic framewise phoneme classification and RNN trained with CTC} + \label{Example} +\end{figure} + +As the output sequence is shorter than the input sequence, there are many possible alignments with the correct label sequence. We want the recurrent neural network to learn one of these correct alignments on its own. Using dynamic programing to sum over all the possible alignments, CTC provides gradients for the backpropagation phase to train the RNN to learn a good alignment. + +In the basic setup shown in Figure~\ref{Setup}, we use stacked bidirectional recurrent neural networks. The CTC brick takes as inputs the output of the last bidirectional recurrent neural network as well as the target label sequence, and calculates the cost of the label. When differentiated, the CTC brick gives back gradients to train the RNN and learn a good alignment. 
+ + +% \begin{figure}[h] +% \centering +% \includegraphics[width=0.3\linewidth]{azerty4.png} +% \caption{Simple bidirectional RNN model with CTC cost layer} +% \label{Setup} +% \end{figure} +
+We use the following notation: +
+\begin{itemize} + \item $y_k^t$: output at time $t$ for symbol $k$ + \item $l$: the label sequence for which we want to calculate a cost + \item $l'$: the same label sequence, with a blank added at the beginning, at the end, and between every pair of letters +\end{itemize} +
+\begin{figure}[H] + \hfill{} + \begin{minipage}{.4\textwidth} + \centering + \includegraphics[width=0.8\linewidth]{azerty4.png} + \caption{Simple bidirectional RNN model with CTC cost layer} + \label{Setup} + \end{minipage} + \hfill{} + \begin{minipage}{.5\textwidth} + \centering + \vspace{1em} + \includegraphics[width=1.0\linewidth]{azerty1.png} + \caption{Computation graph for $\alpha_t(s)$ (corresponds to an unrolling of the automaton that recognizes $\mathcal{B}^{-1}(l)$)} + \label{auto} + \end{minipage} + \hfill{} +\end{figure} +
+\subsection{First Definition} +
+CTC is a dynamic programming algorithm that calculates the following variable: +
+\begin{equation} + \alpha_t(s) = \sum_{\substack{\pi \in N^T:\\\mathcal{B}(\pi_{1:t}) = l'_{1:s}}} \prod_{t'=1}^t y_{\pi_{t'}}^{t'} + \label{first} +\end{equation} +
+Where: +\begin{itemize} + \item $\mathcal{B}$ is the transform that first merges repeated labels and then removes all blanks + \item $\pi \in N^T: \mathcal{B}(\pi_{1:t}) = l'_{1:s}$ corresponds to all possible paths over the labels from time step $1$ to $t$ which give the first $s$ correct labels once the $\mathcal{B}$ transform is applied. + \item $y^{t'}$ corresponds to the output of the RNN at time $t'$ +\end{itemize} +
+We can see that $\alpha_t(s)$ corresponds to the sum, over all possible paths between time steps $1$ and $t$ that give the first $s$ correct labels after removing all blanks and duplicates, of the product of the probabilities output by the RNN along these paths. +
+The formulation described in Equation~\ref{first} is equivalent to the unrolled automaton presented in Figure~\ref{auto}. +To allow for blanks in the output paths, we consider the modified label sequence $l'$, with blanks added to the beginning and the end and inserted between every pair of labels. The length of $l'$ is therefore $2|l|+1$. This allows transitions between blank and non-blank labels, as well as between any pair of distinct non-blank labels. +
+% \begin{figure}[h] +% \centering +% \includegraphics[width=0.6\linewidth]{azerty1.png} +% \caption{Computation graph for $\alpha_t(s)$ (corresponds to an unrolled automaton)} +% \label{auto} +% \end{figure} + +
+\subsection{Recursive Definition} +
+The definition of $\alpha_t(s)$ given above enables us to understand what this function calculates, but unfortunately it is not practical to compute directly. We now give a recursive definition of $\alpha_t(s)$, which provides a dynamic programming algorithm for our problem. This calculation is illustrated in Figure~\ref{auto}. +
+We first initialize $\alpha_1$: probability mass is assigned to starting with a blank or with the first correct label, and all other entries are zero.
Indeed, a single output of the RNN cannot account for more than one label of the target sequence: +
+\[ +\begin{tabular}{rcl} + $\alpha_1(1)$ &=& $y_b^1$\\ + $\alpha_1(2)$ &=& $y_{l_1}^1$\\ + $\alpha_1(s)$ &=& $0, \forall s > 2$ +\end{tabular} +\] +
+We then define the recurrence relations: +\[ +\alpha_t(s) = +\begin{cases} + \bar{\alpha}_t(s) y_{l'_s}^t & \mbox{ if } l'_s = b\mbox{ or }l'_{s-2}=l'_s \\ + (\bar{\alpha}_t(s)+\alpha_{t-1}(s-2)) y_{l'_s}^t &\mbox{ otherwise}\\ +\end{cases} +\] +
+With: + +\[ +\bar{\alpha}_t(s) = \alpha_{t-1}(s) + \alpha_{t-1}(s-1) +\] +
+Finally, we have: +\[ + p(l|x) = \alpha_T(|l'|) + \alpha_T(|l'|-1) +\] +
+This gives us, for a given input, the probability of a particular label sequence. + +
+\subsection{Analysis of the Gradient Provided by CTC} +
+\begin{figure}[h] + \centering + \includegraphics[width=0.6\linewidth]{azerty2.png} + \caption{Evolution of the CTC error signal} + \label{error} +\end{figure} +
+In Figure \ref{error}, the left column shows the output activations for the same sequence at various stages of training. The dashed line corresponds to the blank unit. The right column shows the corresponding error signals, i.e.~the gradient which is sent back to the RNN. +
+At the early stage of training (a), the network does not make meaningful predictions because it has small random weights; the error signal is determined only by the target sequence and is localized only very vaguely. +
+During training (b), the network begins to make predictions and the error localizes around them. +
+At the end of training (c), the network strongly predicts the correct labels at very specific times, and outputs the blank label the rest of the time. The error signal vanishes since there is no error left. + +
+\section{Experiments} +
+We implemented the model using \href{https://github.com/Theano/Theano}{Theano} and \href{https://github.com/mila-udem/blocks/}{Blocks}, a Theano framework for building and training neural networks. The code is available at the following address: \url{https://github.com/thomasmesnard/CTC-LSTM}. +
+As Theano provides an automated symbolic differentiation tool, we implemented the forward recursion for $\alpha_t(s)$ and let Theano derive the backward calculation. To avoid numerical underflow, two methods can be applied, both of which we implemented: +
+\begin{itemize} + \item Normalize the $\alpha_t$ at each time step $t$: + \hspace{1em} + $ C_t = \sum_s \alpha_t(s)$ + \hspace{1em} + $\hat{\alpha}_t(s) = \frac{\alpha_t(s)}{C_t}$ + \item Do our calculations in the logarithmic domain + +\end{itemize} +
(A short NumPy sketch of the normalized recursion is given at the end of the next subsection.) +
+\subsection{Toy Dataset} +
+We first tried our implementation on a simple task. Our dataset is composed of the following sequences, where each digit is repeated a random number of times. +
+\begin{center} +\begin{tabular}{c} + $1^*2^*3^*4^*5^* \to 1$ \\ + $1^*2^*3^*2^*1^* \to 2$ \\ + $5^*4^*3^*2^*1^* \to 3$ \\ + $5^*4^*3^*4^*5^* \to 4$ \\ +\end{tabular} +\end{center} +
+The network has to learn this association. The first two sequences overlap on their first three digits: the network has to wait until the fourth digit to know whether the answer is a 1 or a 2. We can see here the importance of the blank label. Indeed, the network has to output the blank label until the fourth digit is presented before it can give the correct label. The same reasoning applies to the last two sequences.
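The way such inputs can be produced is easy to picture with a small Python sketch (a hypothetical generator written for illustration, not the exact script used in our experiments): each target class is associated with a digit pattern, and every digit of the pattern is repeated a random number of times.

\begin{verbatim}
import random

# Hypothetical sketch of the toy data: each target label maps to a digit
# pattern, and every digit of the pattern is repeated a random number of
# times. Replacing randint(1, max_repeat) by randint(0, max_repeat) omits
# some digits entirely, which gives the harder variant described below.
PATTERNS = {1: (1, 2, 3, 4, 5),
            2: (1, 2, 3, 2, 1),
            3: (5, 4, 3, 2, 1),
            4: (5, 4, 3, 4, 5)}

def sample_input(target, max_repeat=5):
    sequence = []
    for digit in PATTERNS[target]:
        sequence += [digit] * random.randint(1, max_repeat)
    return sequence

print(sample_input(2))   # e.g. [1, 1, 2, 3, 3, 3, 2, 2, 2, 1]
\end{verbatim}

A full training example concatenates several such patterns, so that the target is itself a sequence of class labels (of length 5 to 50 in the tables below).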
+ +We have two versions of the toy dataset: in the first version, each piece of the sequence is perfectly identifiable since each character appears at least once (the input sequence is therefore at least 5 times as long as the target sequence). In the second version, some characters may be omitted from the input, adding stochasticity to the task. CTC provides very good results on both tasks (shown in Table~\ref{tbl_result} and Table~\ref{tbl_result2}). +
+\begin{table}[H] + \centering + \vspace{2ex} + \begin{minipage}{0.45\textwidth} + \begin{tabular}{l l l} + %\toprule + \hline + \textbf{Results} & \textbf{train} & \textbf{valid}\\ + \hline + %\midrule + Output sequence length & 5 -- 50 & 5 -- 50 \\ + Error rate & 0 & 0 \\ + Mean edit distance & 0 & 0 \\ + Errors per character & 0 & 0 \\ + \hline + %\bottomrule + \end{tabular} + \caption{Performance of CTC on our toy dataset, with perfect input sequences} + \label{tbl_result} + \end{minipage} + \hfill{} + \begin{minipage}{0.45\textwidth} + \begin{tabular}{l l l} + %\toprule + \hline + \textbf{Results} & \textbf{train} & \textbf{valid}\\ + \hline + %\midrule + Output sequence length & 5 -- 20 & 5 -- 20 \\ + Error rate & 0.62 & 0.63 \\ + Mean edit distance & 1.0 & 1.1 \\ + Errors per character & 0.08 & 0.09 \\ + \hline + %\bottomrule + \end{tabular} + \caption{Performance of CTC on our toy dataset, with imperfect input sequences} + \label{tbl_result2} + \end{minipage} +\end{table} +
+We observe that the model performs flawlessly on the first task. On the other task, the error rate is not as low as one might expect, but recall that it corresponds to the rate at which the network fails to recover the complete target sequence exactly. A more meaningful measure is the error rate per character, which is only $9\%$ on the validation set. We deduce from these two results that the model is able to perfectly learn the rule that maps the input sequences to the targets; even so, it cannot achieve perfect results on the second task since information is sometimes missing from the input. Both implementations converge in fewer than 1000 training steps.
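For concreteness, the forward recursion of Section~1.2 combined with the per-time-step normalization described at the beginning of this section can be sketched in a few lines of NumPy. This is a simplified illustration under our notation, written for this report only, and not the Theano code we actually trained with:

\begin{verbatim}
import numpy as np

def ctc_negative_log_likelihood(y, label, blank=0):
    # y: (T, K) array of RNN outputs (softmax over K symbols) for one input;
    # label: target sequence, without blanks, as indices into the K symbols.
    lp = [blank]                        # l': blanks around and between labels
    for c in label:
        lp += [c, blank]
    T, S = y.shape[0], len(lp)

    alpha = np.zeros((T, S))
    alpha[0, 0] = y[0, blank]           # a path can start with a blank ...
    if S > 1:
        alpha[0, 1] = y[0, lp[1]]       # ... or with the first label
    C = alpha[0].sum(); alpha[0] /= C; log_p = np.log(C)

    for t in range(1, T):
        for s in range(S):
            a = alpha[t - 1, s]
            if s > 0:
                a += alpha[t - 1, s - 1]
            # skipping the previous blank is only allowed between distinct labels
            if s > 1 and lp[s] != blank and lp[s] != lp[s - 2]:
                a += alpha[t - 1, s - 2]
            alpha[t, s] = a * y[t, lp[s]]
        C = alpha[t].sum(); alpha[t] /= C; log_p += np.log(C)

    # p(l|x) = alpha_T(|l'|) + alpha_T(|l'|-1), expressed with normalized alphas
    return -(log_p + np.log(alpha[T - 1, S - 1] + alpha[T - 1, S - 2]))
\end{verbatim}

Working in the logarithmic domain instead would replace the products by sums of logarithms and the sums by log-sum-exp operations.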
+ +%\begin{figure}[h] +% \centering +% \includegraphics[width=0.6\linewidth]{ctc_cost_best.png} +% \caption{Training and validation cost of the CTC model (negative log likelihood)} +%\end{figure} +
+\subsection{TIMIT} +
+\begin{figure}[H] + \hfill{} + \begin{minipage}{.5\textwidth} + \centering + \includegraphics[width=0.95\linewidth]{pouet_timit_cost.png} + \caption{\footnotesize Training cost of the CTC model on TIMIT} + \label{timit_cost} + \end{minipage} + \hfill{} + \begin{minipage}{.45\textwidth} + \centering + \vspace{1em} + \includegraphics[width=0.95\linewidth]{pouet_timit_ER.png} + \caption{\footnotesize Error rate of the CTC model on TIMIT} + \label{timit_er} + \end{minipage} + \hfill{} +\end{figure} + +
+\begin{table}[H] + \centering + \small + \begin{tabular}{l l l l l l} + \hline + \textbf{Convolution layers} & \textbf{Filters} & \textbf{Filter size} & \textbf{Stride} & \textbf{Skip} & \textbf{Normalize} \\ + \hline + Layer 1 & 20 & 200 & 10 & Yes & Yes \\ + Layer 2 & 20 & 200 & 10 & Yes & Yes \\ + Layer 3 & 20 & 30 & 2 & Yes & Yes \\ + Layer 4 & 100 & 20 & 2 & No & Yes \\ + \hline + \textbf{Recurrent layers} & \textbf{Size} & \textbf{Type} & \textbf{Bidirectional} & \textbf{Skip} & \textbf{Normalize} \\ + \hline + Layer 1 & 50 & LSTM & Yes & Yes & No \\ + Layer 2 & 50 & LSTM & Yes & Yes & No \\ + \hline + %\bottomrule + \end{tabular} + \caption{Structure of the Deep Neural Network for the TIMIT dataset} + \label{params_timit} +\end{table} +
+We then tried our model on the classical TIMIT dataset. It is a raw speech signal dataset of 4120 sentences labelled by phonemes or by words. The average audio length is 50\,000 samples and the average sentence length is 38 phonemes. +
+To avoid hand-crafted feature extraction on the speech signal, we apply convolution layers directly to the raw waveform, followed by bidirectional LSTM layers and, of course, the CTC cost function. We use noise and normalization on intermediate layers for regularization. The structure of our model is described in Table~\ref{params_timit}. +
+We were able to attain a $50\%$ phoneme error rate on the validation set after about 150 epochs of training (see Figure~\ref{timit_er}). This result is not as good as the $30\%$ achieved in the original paper~\cite{graves2006connectionist}; however, it is an extremely good result for the model, as we do not use any hand-crafted preprocessing of the data. This shows that the convolution layers are able to learn by themselves the filters necessary to process raw speech. +
+\section{Conclusion} +
+CTC is a powerful cost function for training RNNs on unsegmented data, now widely used in major commercial applications. We were able to get very good results with CTC on a toy dataset. We proposed a way of processing speech data with convolutional neural networks and were able to train a convnet-LSTM-CTC model with satisfactory results on TIMIT. +% ------------------------------------------------------------------------------ +% ------------------------------------------------------------------------------ + +%\vspace{4em} +%\hr{} % Horizontal line, like
in HTML +%\vspace{2em} + +% XXX remove if it is ugly or take too much space + + +%% Bibliography +\nocite{*} % XXX remove to hide references not cited! + +% To compile: pdflatex, pdflatex, bibtex, pdflatex, pdflatex +\bibliographystyle{unsrt} + +\small +\bibliography{Report} + + +% ------------------------------------------------------------------------------ +%\hr{} % Horizontal line, like
in HTML +% XXX remove if it is ugly or take too much space + + +%\vfill +%\subsubsection*{License} +%\begin{center} +% \begin{small} +% This paper (and the additional resources -- including code, images, etc), +% are publicly published under the terms of the \href{http://opensource.org/licenses/MIT}{MIT License}. +% Copyright 2015-2016 \copyright ~Alex~Auvolat and ~Thomas~Mesnard. +% \end{small} +%\end{center} + + +\end{document} diff --git a/Report/azerty1.png b/Report/azerty1.png new file mode 100644 index 0000000..9e2e283 Binary files /dev/null and b/Report/azerty1.png differ diff --git a/Report/azerty1.svg b/Report/azerty1.svg new file mode 100644 index 0000000..d86e525 --- /dev/null +++ b/Report/azerty1.svg @@ -0,0 +1,888 @@ + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Report/azerty2.png b/Report/azerty2.png new file mode 100644 index 0000000..c2636bb Binary files /dev/null and b/Report/azerty2.png differ diff --git a/Report/azerty2.svg b/Report/azerty2.svg new file mode 100644 index 0000000..0ccb650 --- /dev/null +++ b/Report/azerty2.svg @@ -0,0 +1,524 @@ + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + (b) + + + + + + + + error + + + + + + + + + + + output + (c) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (a) + + + + + + + + + + + diff --git a/Report/azerty3.png b/Report/azerty3.png new file mode 100644 index 0000000..13a7cd2 Binary files /dev/null and b/Report/azerty3.png differ diff --git a/Report/azerty3.svg b/Report/azerty3.svg new file mode 100644 index 0000000..d83e4d3 --- /dev/null +++ b/Report/azerty3.svg @@ -0,0 +1,989 @@ + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + 0 + + + + + + + + + + label probability + + + + + + + + + + + + + + + + """""" + + + 1 + 0 + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + n + + + dcl + + + dixv + Framewise + + + the sound of + + + Waveform + CTC + dhaxsaw + + + + + + + + + diff --git a/Report/azerty4.png b/Report/azerty4.png new file mode 100644 index 0000000..97e0b7b Binary files /dev/null and b/Report/azerty4.png differ diff --git a/Report/azerty4.svg b/Report/azerty4.svg new file mode 100644 index 0000000..317171a --- /dev/null +++ b/Report/azerty4.svg @@ -0,0 +1,1968 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + Input Sequence + + + + + + + + + + CTC layer + + + + + + + + + + + + + + + + + + + + + + + + ... + + + + + + + + + + + + + + + + ... 
+ + + + h + + + + h + + + + h + + + + h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ... + + + + h + + + + h + + + + h + + + + h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cost + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Target Seq. + + + diff --git a/Report/ctc_cost_best.png b/Report/ctc_cost_best.png new file mode 100644 index 0000000..63bd584 Binary files /dev/null and b/Report/ctc_cost_best.png differ diff --git a/Report/ens2.jpg b/Report/ens2.jpg new file mode 100755 index 0000000..22463ab Binary files /dev/null and b/Report/ens2.jpg differ diff --git a/Report/final/Projet_Auvolat_Mesnard.zip b/Report/final/Projet_Auvolat_Mesnard.zip new file mode 100644 index 0000000..92d3945 Binary files /dev/null and b/Report/final/Projet_Auvolat_Mesnard.zip differ diff --git a/Report/les_resultats.py b/Report/les_resultats.py new file mode 100644 index 0000000..7519e37 --- /dev/null +++ b/Report/les_resultats.py @@ -0,0 +1,27 @@ +import numpy +import matplotlib.pyplot as plt + +train_x = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160, 170, 180, 190, 200, 210, 210, 220, 230, 240, 250, 260, 270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380, 390, 400, 410, 420, 420, 430, 440, 450, 460, 470, 480, 490, 500, 510, 520, 530, 540, 550, 560, 570, 580, 590, 600, 610, 620, 630, 630, 640, 650, 660, 670, 680, 690, 700, 710, 720, 730, 740, 750, 760, 770, 780, 790, 800, 810, 820, 830, 840, 840, 850, 860, 870, 880, 890, 900, 910, 920, 930, 940, 950, 960, 970, 980, 990, 1000, 1010, 1020, 1030, 1040, 1050, 1050, 1060, 1070, 1080, 1090, 1100, 1110, 1120, 1130, 1140, 1150, 1160, 1170, 1180, 1190, 1200, 1210, 1220, 1230, 1240, 1250, 1260, 1260, 1270, 1280, 1290, 1300, 1310, 1320, 1330, 1340, 1350, 1360, 1370, 1380, 1390, 1400, 1410, 1420, 1430, 1440, 1450, 1460, 1470, 1470, 1480, 1490, 1500, 1510, 1520, 1530, 1540, 1550, 1560, 1570, 1580, 1590, 1600, 1610, 1620, 1630, 1640, 1650, 1660, 1670, 1680, 1680, 1690, 1700, 1710, 1720, 1730, 1740, 1750, 1760, 1770, 1780, 1790, 1800, 1810, 1820, 1830, 1840, 1850, 1860, 1870, 1880, 1890, 1890, 1900, 1910, 1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000, 2010, 2020, 2030, 2040, 2050, 2060, 2070, 2080, 2090, 2100, 2100, 2110, 2120, 2130, 2140, 2150, 2160, 2170, 2180, 2190, 2200, 2210, 2220, 2230, 2240, 2250, 2260, 2270, 2280, 2290, 2300, 2310, 2310, 2320, 2330, 2340, 2350, 2360, 2370, 2380, 2390, 2400, 2410, 2420, 2430, 2440, 2450, 2460, 2470, 2480, 2490, 2500, 2510, 2520, 2520, 2530, 2540, 2550, 2560, 2570, 2580, 2590, 2600, 2610, 2620, 2630, 2640, 2650, 2660, 2670, 2680, 2690, 2700, 2710, 2720, 2730, 2730, 2740, 2750, 2760, 2770, 2780, 2790, 2800, 2810, 2820, 2830, 2840, 2850, 2860, 2870, 2880, 2890, 2900, 2910, 2920, 2930, 2940, 2940, 2950, 2960, 2970, 2980, 2990, 3000, 3010, 3020, 3030, 3040, 3050, 3060, 3070, 3080, 3090, 3100, 3110, 3120, 3130, 3140, 3150, 3150, 3160, 3170, 3180, 3190, 3200, 3210, 3220, 3230, 3240, 3250, 3260, 3270, 3280, 3290, 3300, 3310, 3320, 3330, 3340, 3350, 3360, 3360, 3370, 3380, 3390, 3400, 3410, 3420, 3430, 3440, 3450, 3460, 3470, 3480, 3490, 3500, 3510, 3520, 3530, 3540, 3550, 3560, 3570, 3570, 3580, 3590, 3600, 3610, 3620, 3630, 3640, 3650, 3660, 3670, 3680, 3690, 3700, 3710, 3720, 3730, 3740, 3750, 3760, 3770, 3780, 3780, 3790, 3800, 3810, 3820, 3830, 3840, 3850, 3860, 3870, 3880, 3890, 3900, 3910, 3920, 3930, 3940, 3950, 3960, 3970, 3980, 3990, 3990, 4000, 4010, 4020, 4030, 4040, 4050, 4060, 4070, 4080, 4090, 4100, 4110, 
4120, 4130, 4140, 4150, 4160, 4170, 4180, 4190, 4200, 4200, 4210, 4220, 4230, 4240, 4250, 4260, 4270, 4280, 4290, 4300, 4310, 4320, 4330, 4340, 4350, 4360, 4370, 4380, 4390, 4400, 4410, 4410, 4420, 4430, 4440, 4450, 4460, 4470, 4480, 4490, 4500, 4510, 4520, 4530, 4540, 4550, 4560, 4570, 4580, 4590, 4600, 4610, 4620, 4620, 4630, 4640, 4650, 4660, 4670, 4680, 4690, 4700, 4710, 4720, 4730, 4740, 4750, 4760, 4770, 4780, 4790, 4800, 4810, 4820, 4830, 4830, 4840, 4850, 4860, 4870, 4880, 4890, 4900, 4910, 4920, 4930, 4940, 4950, 4960, 4970, 4980, 4990, 5000, 5010, 5020, 5030, 5040, 5040, 5050, 5060, 5070, 5080, 5090, 5100, 5110, 5120, 5130, 5140, 5150, 5160, 5170, 5180, 5190, 5200, 5210, 5220, 5230, 5240, 5250, 5250, 5260, 5270, 5280, 5290, 5300, 5310, 5320, 5330, 5340, 5350, 5360, 5370, 5380, 5390, 5400, 5410, 5420, 5430, 5440, 5450, 5460, 5460, 5470, 5480, 5490, 5500, 5510, 5520, 5530, 5540, 5550, 5560, 5570, 5580, 5590, 5600, 5610, 5620, 5630, 5640, 5650, 5660, 5670, 5670, 5680, 5690, 5700, 5710, 5720, 5730, 5740, 5750, 5760, 5770, 5780, 5790, 5800, 5810, 5820, 5830, 5840, 5850, 5860, 5870, 5880, 5880, 5890, 5900, 5910, 5920, 5930, 5940, 5950, 5960, 5970, 5980, 5990, 6000, 6010, 6020, 6030, 6040, 6050, 6060, 6070, 6080, 6090, 6090, 6100, 6110, 6120, 6130, 6140, 6150, 6160, 6170, 6180, 6190, 6200, 6210, 6220, 6230, 6240, 6250, 6260, 6270, 6280, 6290, 6300, 6300, 6310, 6320, 6330, 6340, 6350, 6360, 6370, 6380, 6390, 6400, 6410, 6420, 6430, 6440, 6450, 6460, 6470, 6480, 6490, 6500, 6510, 6510, 6520, 6530, 6540, 6550, 6560, 6570, 6580, 6590, 6600, 6610, 6620, 6630, 6640, 6650, 6660, 6670, 6680, 6690, 6700, 6710, 6720, 6720, 6730, 6740, 6750, 6760, 6770, 6780, 6790, 6800, 6810, 6820, 6830, 6840, 6850, 6860, 6870, 6880, 6890, 6900, 6910, 6920, 6930, 6930, 6940, 6950, 6960, 6970, 6980, 6990, 7000, 7010, 7020, 7030, 7040, 7050, 7060, 7070, 7080, 7090, 7100, 7110, 7120, 7130, 7140, 7140, 7150, 7160, 7170, 7180] +train_cost =[186.58523559570312, 231.2848358154297, 268.4850158691406, 341.6783752441406, 236.5081787109375, 196.24838256835938, 226.8048095703125, 286.9937438964844, 266.5400085449219, 175.7088165283203, 200.8835906982422, 242.95028686523438, 291.7144775390625, 162.75872802734375, 181.70126342773438, 211.9771270751953, 298.81695556640625, 147.48509216308594, 170.39553833007812, 186.9927520751953, 298.29949951171875, 298.29949951171875, 133.5607147216797, 156.2488555908203, 169.14920043945312, 241.3719940185547, 185.77462768554688, 144.39511108398438, 158.01193237304688, 205.6425323486328, 210.33004760742188, 137.01002502441406, 148.1883087158203, 180.172607421875, 230.05520629882812, 128.0877685546875, 138.49575805664062, 161.72207641601562, 238.3956298828125, 120.91993713378906, 131.71182250976562, 150.835693359375, 248.25296020507812, 248.25296020507812, 112.76859283447266, 126.1996078491211, 138.47633361816406, 201.5886688232422, 159.96022033691406, 123.56022644042969, 131.53488159179688, 175.68978881835938, 183.5472412109375, 131.619873046875, 130.74710083007812, 157.12222290039062, 199.36105346679688, 114.35847473144531, 120.84625244140625, 139.30274963378906, 207.83334350585938, 109.0135726928711, 115.93318176269531, 130.4647979736328, 219.6696319580078, 219.6696319580078, 102.07911682128906, 112.4325942993164, 124.21354675292969, 182.14559936523438, 146.0809783935547, 109.84268951416016, 119.76466369628906, 157.3028564453125, 168.3053436279297, 107.03828430175781, 114.07322692871094, 139.56552124023438, 179.39996337890625, 103.29052734375, 109.51301574707031, 129.4232635498047, 
197.06076049804688, 102.82859802246094, 117.35661315917969, 122.02813720703125, 198.33517456054688, 198.33517456054688, 95.299072265625, 104.1549072265625, 112.89962005615234, 163.8797607421875, 131.33120727539062, 101.44754028320312, 108.68290710449219, 144.9304962158203, 152.9595489501953, 99.14348602294922, 104.69212341308594, 130.31112670898438, 171.32177734375, 96.97526550292969, 102.92271423339844, 120.22615051269531, 183.78558349609375, 96.27936553955078, 99.43331909179688, 111.60197448730469, 190.6114959716797, 190.6114959716797, 93.12297058105469, 98.99793243408203, 106.05448913574219, 155.9643096923828, 125.0423812866211, 95.38441467285156, 101.40775299072266, 139.3518524169922, 152.1620330810547, 94.94572448730469, 99.42830657958984, 121.4210205078125, 161.89910888671875, 92.0219497680664, 95.9166259765625, 114.2807388305664, 171.328369140625, 90.45287322998047, 93.57655334472656, 105.33430480957031, 181.9762725830078, 181.9762725830078, 87.84622192382812, 91.97550964355469, 98.90853881835938, 152.52560424804688, 126.38728332519531, 92.04643249511719, 98.82345581054688, 131.1653289794922, 139.3582305908203, 89.69316101074219, 95.1003646850586, 115.2433853149414, 154.51121520996094, 87.71465301513672, 90.84709167480469, 108.09507751464844, 167.04690551757812, 87.98078918457031, 92.11004638671875, 102.40474700927734, 172.33309936523438, 172.33309936523438, 83.36625671386719, 88.2079086303711, 95.22660064697266, 141.96542358398438, 116.61067962646484, 86.76722717285156, 91.27314758300781, 126.1171646118164, 137.31405639648438, 86.92185974121094, 89.49848175048828, 108.82087707519531, 151.90994262695312, 86.15975189208984, 87.28550720214844, 101.3384780883789, 153.85289001464844, 83.75611877441406, 85.4171371459961, 93.38385009765625, 162.8311309814453, 162.8311309814453, 81.77302551269531, 83.9618911743164, 90.37879943847656, 137.42535400390625, 108.82861328125, 83.769287109375, 87.9117202758789, 128.86685180664062, 130.32766723632812, 82.22637939453125, 85.20014953613281, 105.11083984375, 142.09616088867188, 81.00532531738281, 82.98677062988281, 96.0823745727539, 151.67507934570312, 81.98279571533203, 84.234130859375, 90.83128356933594, 157.9053192138672, 157.9053192138672, 78.5243911743164, 80.73138427734375, 87.2260971069336, 131.54022216796875, 106.52214050292969, 80.36766052246094, 83.270263671875, 118.41828918457031, 123.03299713134766, 79.18318176269531, 81.58293151855469, 100.9095687866211, 138.06655883789062, 77.97090911865234, 80.31898498535156, 93.3165512084961, 148.88990783691406, 78.63114929199219, 79.39232635498047, 87.46602630615234, 150.57052612304688, 150.57052612304688, 74.4915771484375, 77.86602020263672, 85.085205078125, 126.7571792602539, 102.62113952636719, 77.26814270019531, 81.88603210449219, 111.36767578125, 123.3438491821289, 76.97804260253906, 79.98005676269531, 102.70826721191406, 132.047607421875, 76.29537963867188, 77.84678649902344, 87.66104888916016, 140.47149658203125, 75.1488037109375, 77.91696166992188, 83.76225280761719, 146.69065856933594, 146.69065856933594, 73.37548828125, 75.57395935058594, 80.6111068725586, 122.05367279052734, 100.41688537597656, 74.68812561035156, 77.81558990478516, 111.53568267822266, 119.72019958496094, 74.37997436523438, 76.07890319824219, 97.34192657470703, 125.8359375, 80.32325744628906, 75.70204162597656, 86.45953369140625, 135.6238555908203, 72.93254089355469, 73.78556823730469, 80.69652557373047, 144.58584594726562, 144.58584594726562, 71.6277847290039, 72.7505874633789, 77.8028335571289, 117.4685287475586, 
96.89445495605469, 72.18317413330078, 76.11204528808594, 101.96377563476562, 111.3445816040039, 71.81771850585938, 73.37092590332031, 92.38727569580078, 128.2201385498047, 75.30054473876953, 74.13258361816406, 83.42876434326172, 128.43910217285156, 70.60649108886719, 72.1220474243164, 77.58851623535156, 136.3867950439453, 136.3867950439453, 68.58518981933594, 70.98548889160156, 76.19511413574219, 115.77790832519531, 97.15103149414062, 70.9467544555664, 73.04668426513672, 99.71874237060547, 106.4306640625, 69.07820892333984, 72.92381286621094, 89.81681823730469, 124.00859069824219, 70.0996322631836, 71.41864776611328, 81.69154357910156, 132.60458374023438, 71.89607238769531, 70.84252166748047, 76.05951690673828, 131.5980987548828, 131.5980987548828, 66.73311614990234, 69.84365844726562, 73.22160339355469, 113.47918701171875, 103.18702697753906, 70.29900360107422, 74.66051483154297, 97.21063995361328, 101.74390411376953, 68.29640197753906, 69.72774505615234, 84.18803405761719, 117.13846588134766, 68.83837890625, 68.65928649902344, 78.08269500732422, 125.20963287353516, 67.17547607421875, 67.62342834472656, 74.69725799560547, 131.299560546875, 131.299560546875, 66.20576477050781, 67.56465911865234, 70.46302795410156, 108.74079895019531, 87.25257873535156, 66.88219451904297, 68.388671875, 96.19306945800781, 110.92474365234375, 67.10926818847656, 68.50953674316406, 83.0287857055664, 117.23604583740234, 69.73292541503906, 67.70140838623047, 76.39694213867188, 118.3641128540039, 66.11502075195312, 77.0927505493164, 74.20610046386719, 122.42735290527344, 122.42735290527344, 66.75979614257812, 66.8727798461914, 72.0111312866211, 99.39823150634766, 83.5907974243164, 65.16789245605469, 66.34971618652344, 92.41311645507812, 96.60044860839844, 64.5789566040039, 65.20745086669922, 80.49311828613281, 117.85589599609375, 70.88825225830078, 65.9358901977539, 74.72298431396484, 115.99299621582031, 65.42508697509766, 64.65568542480469, 70.11262512207031, 121.27635192871094, 121.27635192871094, 63.4225959777832, 63.612648010253906, 67.42547607421875, 102.3966064453125, 89.56172180175781, 64.49107360839844, 65.0595474243164, 87.95652770996094, 96.8666000366211, 63.4747428894043, 64.94310760498047, 78.38616180419922, 108.6335678100586, 63.50325393676758, 64.29096984863281, 74.0794677734375, 118.99910736083984, 64.08467102050781, 63.20738983154297, 67.41957092285156, 120.48453521728516, 120.48453521728516, 62.75016403198242, 62.72233200073242, 66.0561294555664, 101.48845672607422, 77.67181396484375, 63.402793884277344, 64.58616638183594, 93.43408966064453, 102.4688720703125, 63.1336784362793, 62.73102569580078, 76.08720397949219, 114.50164794921875, 63.35431671142578, 63.4862174987793, 69.34383392333984, 109.49269104003906, 63.25361251831055, 63.601593017578125, 66.90702056884766, 117.00096130371094, 117.00096130371094, 61.197845458984375, 61.28681182861328, 64.1302261352539, 98.46504211425781, 81.71538543701172, 61.749969482421875, 61.91248321533203, 84.7287368774414, 94.66127014160156, 61.2975959777832, 61.72505569458008, 85.80812072753906, 98.3292007446289, 61.599403381347656, 60.64464569091797, 72.69552612304688, 106.3861083984375, 59.685752868652344, 60.333763122558594, 66.51243591308594, 110.71014404296875, 110.71014404296875, 59.2495231628418, 59.7466926574707, 64.67130279541016, 95.10276794433594, 77.95721435546875, 60.2974739074707, 60.869384765625, 87.06355285644531, 91.59613800048828, 60.35717010498047, 61.14387130737305, 75.7072525024414, 120.1728515625, 67.25605773925781, 62.649757385253906, 
68.53819274902344, 101.90452575683594, 59.9113655090332, 59.49762725830078, 63.416770935058594, 104.64503479003906, 104.64503479003906, 57.59880447387695, 58.96482467651367, 62.84669876098633, 92.8679428100586, 75.42010498046875, 61.5623893737793, 63.050079345703125, 83.56361389160156, 91.00521087646484, 59.494140625, 64.5066146850586, 98.4172134399414, 106.7485580444336, 67.62139892578125, 68.00704956054688, 72.58460998535156, 96.7760009765625, 60.5396614074707, 61.40576171875, 64.27639770507812, 100.02091979980469, 100.02091979980469, 56.78606033325195, 58.3913688659668, 60.33882522583008, 90.1707534790039, 74.36104583740234, 58.254234313964844, 58.073387145996094, 79.17134094238281, 88.10829162597656, 57.819313049316406, 57.3053092956543, 72.53285217285156, 99.56086730957031, 59.077232360839844, 58.34284591674805, 65.54971313476562, 102.39838409423828, 57.807456970214844, 56.773719787597656, 61.29707717895508, 129.23179626464844, 129.23179626464844, 63.75461959838867, 65.4126968383789, 65.79841613769531, 91.47257232666016, 72.67633819580078, 60.79298782348633, 59.847129821777344, 78.55244445800781, 79.09380340576172, 58.3369255065918, 57.52428436279297, 68.28458404541016, 86.54408264160156, 57.3004035949707, 56.52373504638672, 62.52003860473633, 94.0240707397461, 57.260223388671875, 56.5450439453125, 60.10783767700195, 103.21888732910156, 103.21888732910156, 56.29463577270508, 56.37690353393555, 57.68669891357422, 90.02476501464844, 72.69828796386719, 56.312225341796875, 56.296226501464844, 79.04252624511719, 89.03620147705078, 57.0503044128418, 56.09937286376953, 71.69039916992188, 103.64656066894531, 64.68502807617188, 58.32489776611328, 63.13714599609375, 94.18540954589844, 56.382232666015625, 55.02116012573242, 59.633148193359375, 100.21430969238281, 100.21430969238281, 55.70552444458008, 55.889007568359375, 57.354644775390625, 86.56227111816406, 71.72993469238281, 55.203094482421875, 56.14110565185547, 77.64220428466797, 87.61656188964844, 56.01732635498047, 56.3336067199707, 70.03968811035156, 90.45295715332031, 55.19712448120117, 54.34111404418945, 62.1539192199707, 101.70499420166016, 58.18134689331055, 55.41808319091797, 58.140541076660156, 107.4736557006836, 107.4736557006836, 63.69533157348633, 58.38372039794922, 57.561012268066406, 80.69139099121094, 66.34272766113281, 55.101318359375, 53.94350051879883, 71.37568664550781, 80.2947006225586, 54.42975997924805, 53.98969268798828, 66.13937377929688, 97.61644744873047, 59.25038528442383, 55.36942672729492, 61.2443962097168, 90.33424377441406, 54.00713348388672, 54.1462516784668, 56.25630569458008, 93.71345520019531, 93.71345520019531, 53.76762008666992, 54.99840545654297, 58.410118103027344, 92.22172546386719, 72.87686920166016, 55.26332473754883, 53.97492218017578, 73.10933685302734, 78.8005599975586, 53.8826789855957, 52.685951232910156, 65.84062194824219, 93.15460968017578, 55.31293869018555, 53.404823303222656, 60.04029083251953, 93.31055450439453, 57.60600662231445, 54.57752227783203, 56.5758056640625, 96.80931091308594, 96.80931091308594, 57.15104293823242, 53.555030822753906, 54.15336227416992, 79.26002502441406, 69.00534057617188, 53.702781677246094, 54.45573043823242, 73.52928161621094, 88.3958740234375, 59.07117462158203, 54.78680419921875, 62.48712921142578, 81.0561752319336, 53.82013702392578, 51.609405517578125, 65.02668762207031, 85.89973449707031, 52.37298583984375, 59.02973175048828, 56.21425247192383, 89.510986328125, 89.510986328125, 51.91289138793945, 52.08428955078125, 53.18146896362305, 82.71158599853516, 
65.89899444580078, 52.647216796875, 55.69282913208008, 69.70824432373047, 73.98417663574219, 51.95792770385742, 51.611351013183594, 61.88373947143555, 87.11563110351562, 63.61185836791992, 56.17607498168945, 59.37639617919922, 84.23460388183594, 52.92940139770508, 52.14185333251953, 54.071044921875, 88.82882690429688, 88.82882690429688, 50.680030822753906, 51.265777587890625, 53.25732421875, 86.82232666015625, 63.775428771972656, 51.985504150390625, 51.64417266845703, 74.45440673828125, 82.09894561767578, 52.4028434753418, 51.156578063964844, 62.92765426635742, 90.60025024414062, 54.31724166870117, 51.569862365722656, 58.97062301635742, 84.95948791503906, 53.263824462890625, 52.2595329284668, 54.409217834472656, 95.94291687011719, 95.94291687011719, 55.44050216674805, 64.35740661621094, 57.17742919921875, 75.88489532470703, 59.90824508666992, 51.412872314453125, 50.87439727783203, 67.61165618896484, 70.25477600097656, 50.390682220458984, 50.11556625366211, 64.45280456542969, 85.7797622680664, 53.36262893676758, 50.269500732421875, 57.17934036254883, 88.0357666015625, 51.14348602294922, 50.049049377441406, 52.90114212036133, 92.9959487915039, 92.9959487915039, 51.27323532104492, 50.2881965637207, 54.0728645324707, 85.35533142089844, 66.60649108886719, 50.510459899902344, 49.99077606201172, 66.27311706542969, 82.9278335571289, 58.38496780395508, 53.43803787231445, 62.60850143432617, 74.76079559326172, 51.48041534423828, 49.733985900878906, 54.603729248046875, 80.97642517089844, 55.8668327331543, 51.910423278808594, 52.561973571777344, 83.47212219238281, 83.47212219238281, 60.261390686035156, 57.36661911010742, 53.60051727294922, 71.9022445678711, 57.912445068359375, 50.25806427001953, 49.1870002746582, 63.5728759765625, 68.0941390991211, 49.70363235473633, 48.87373352050781, 58.7414436340332, 81.9002914428711, 50.26862335205078, 48.34414291381836, 57.756858825683594, 89.35389709472656, 51.31721115112305, 49.74910354614258, 69.26002502441406, 83.78556060791016, 83.78556060791016, 48.9345703125, 49.421024322509766, 49.83205795288086, 71.89759826660156] + +valid_x = [0, 42, 84, 126, 168, 210, 252, 294, 336, 378, 420, 462, 504, 546, 588, 630, 672, 714, 756, 798, 840, 882, 924, 966, 1008, 1050, 1092, 1134, 1176, 1218, 1260, 1302, 1344, 1386, 1428, 1470, 1512, 1554, 1596, 1638, 1680, 1722, 1764, 1806, 1848, 1890, 1932, 1974, 2016, 2058, 2100, 2142, 2184, 2226, 2268, 2310, 2352, 2394, 2436, 2478, 2520, 2562, 2604, 2646, 2688, 2730, 2772, 2814, 2856, 2898, 2940, 2982, 3024, 3066, 3108, 3150, 3192, 3234, 3276, 3318, 3360, 3402, 3444, 3486, 3528, 3570, 3612, 3654, 3696, 3738, 3780, 3822, 3864, 3906, 3948, 3990, 4032, 4074, 4116, 4158, 4200, 4242, 4284, 4326, 4368, 4410, 4452, 4494, 4536, 4578, 4620, 4662, 4704, 4746, 4788, 4830, 4872, 4914, 4956, 4998, 5040, 5082, 5124, 5166, 5208, 5250, 5292, 5334, 5376, 5418, 5460, 5502, 5544, 5586, 5628, 5670, 5712, 5754, 5796, 5838, 5880, 5922, 5964, 6006, 6048, 6090, 6132, 6174, 6216, 6258, 6300, 6342, 6384, 6426, 6468, 6510, 6552, 6594, 6636, 6678, 6720, 6762, 6804, 6846, 6888, 6930, 6972, 7014, 7056, 7098, 7140, 7182] +valid_cost =[364.5904235839844, 279.127685546875, 264.63031005859375, 249.2937469482422, 232.4756317138672, 235.8790283203125, 215.7898712158203, 208.1250457763672, 199.77835083007812, 189.05300903320312, 191.4433135986328, 192.36190795898438, 187.217041015625, 175.91787719726562, 171.91650390625, 172.51966857910156, 174.4485626220703, 171.8967742919922, 163.4190216064453, 175.60372924804688, 157.4044189453125, 168.50167846679688, 
169.2763214111328, 161.4881591796875, 174.42529296875, 167.9474334716797, 161.90689086914062, 162.2763671875, 160.785888671875, 156.46385192871094, 161.48812866210938, 158.58535766601562, 159.79159545898438, 157.05345153808594, 153.82350158691406, 151.98086547851562, 158.80264282226562, 162.184814453125, 157.32852172851562, 150.48904418945312, 161.20620727539062, 148.552490234375, 149.32762145996094, 148.5352325439453, 151.89700317382812, 153.10687255859375, 149.1084442138672, 148.3770751953125, 140.5742950439453, 153.5819854736328, 142.99942016601562, 152.76815795898438, 152.29672241210938, 145.40451049804688, 145.81600952148438, 147.19259643554688, 153.97625732421875, 146.57736206054688, 138.8104705810547, 150.4037322998047, 148.7781524658203, 140.95362854003906, 141.92892456054688, 157.51220703125, 144.71168518066406, 148.02027893066406, 148.974853515625, 134.40692138671875, 147.7875213623047, 163.51719665527344, 142.8425750732422, 144.03854370117188, 144.1996307373047, 144.03939819335938, 145.361083984375, 148.8046112060547, 141.1871337890625, 153.98260498046875, 149.01589965820312, 137.35989379882812, 144.11614990234375, 134.5603485107422, 137.90049743652344, 140.70523071289062, 145.3502197265625, 145.46055603027344, 157.419677734375, 138.19113159179688, 143.32444763183594, 149.907470703125, 142.496337890625, 145.5762481689453, 161.54122924804688, 160.5546112060547, 138.96588134765625, 151.0968780517578, 151.427490234375, 151.1297149658203, 140.4689178466797, 134.4274444580078, 144.43069458007812, 152.95999145507812, 144.57208251953125, 164.4092559814453, 134.485595703125, 140.07579040527344, 146.0001220703125, 144.75619506835938, 136.7599639892578, 139.87179565429688, 142.1566619873047, 139.58596801757812, 151.96633911132812, 155.36654663085938, 145.63656616210938, 135.86512756347656, 132.28634643554688, 131.9317169189453, 134.84188842773438, 143.6789093017578, 155.36917114257812, 153.2019500732422, 160.43850708007812, 201.95516967773438, 140.56777954101562, 142.15231323242188, 147.2138214111328, 145.48768615722656, 142.57870483398438, 155.54393005371094, 159.6879119873047, 142.1046600341797, 142.68450927734375, 163.3708953857422, 139.8641815185547, 151.1313934326172, 156.3168487548828, 141.163330078125, 157.93283081054688, 156.18775939941406, 151.70700073242188, 165.7404327392578, 171.81130981445312, 146.48873901367188, 135.18301391601562, 145.9766082763672, 150.2149200439453, 147.5810089111328, 157.72586059570312, 141.519775390625, 146.76144409179688, 142.7743377685547, 159.87709045410156, 173.76075744628906, 145.61489868164062, 157.43272399902344, 138.87413024902344, 142.043212890625, 156.00039672851562, 151.99139404296875, 153.26287841796875, 153.92393493652344, 176.95742797851562, 140.74777221679688, 157.04075622558594, 145.801025390625, 139.25115966796875, 143.51907348632812, 153.39659118652344, 159.03585815429688, 148.12803649902344, 143.02743530273438] +valid_error_rate = [1.2631086724996565, 0.9049824843406677, 0.8738996247053146, 0.8520578233003617, 0.8069766317009925, 0.7681459421515464, 0.766872273027897, 0.7491659426689148, 0.7123492771685124, 0.6871207047700881, 0.6942619448900224, 0.6871486190557479, 0.6655759500265122, 0.6348962524533271, 0.620890975087881, 0.6201776292324066, 0.6299488288760186, 0.6159248094409705, 0.5862323707044125, 0.627962991863489, 0.5594458154737949, 0.593076811954379, 0.589563570290804, 0.5691290779411793, 0.6070170089453459, 0.5905831027179957, 0.5695668856054544, 0.5660287208855153, 0.5677807168364525, 0.5521207180321216, 0.5663520123064518, 
0.5586509834080935, 0.5514402708262205, 0.5534750644303859, 0.5422407572418452, 0.5396684825308621, 0.5488098974972963, 0.5643667361587286, 0.545242833584547, 0.5254568337388337, 0.5597240672558547, 0.5199276773482561, 0.520817144818604, 0.5169221287369727, 0.5302750275880098, 0.5289107389003038, 0.522408181130886, 0.5169573311954736, 0.4934916146248579, 0.5301236988827587, 0.498966355279088, 0.5238550244271756, 0.5258710251599551, 0.5114895230084657, 0.5084134049005806, 0.5150075511857868, 0.5278118780627847, 0.5095432948619127, 0.4873661266863346, 0.5116823883354664, 0.51284998396039, 0.48653117391839623, 0.48924323385208845, 0.5343654067665338, 0.4944168768525124, 0.5068075152076781, 0.5128700341209769, 0.4751329269856215, 0.49316138063371184, 0.5400419630259276, 0.491230521723628, 0.5007935844287277, 0.4912514726743102, 0.5000968825370073, 0.4947881328389048, 0.5118980471640825, 0.48360401929914953, 0.5234760774672031, 0.49938904367387293, 0.47671538335829977, 0.4976945612579584, 0.4618954054489731, 0.4747857638187707, 0.47373887019604444, 0.49371623235940937, 0.4931437728404998, 0.5230170909538865, 0.4664031620025635, 0.4814914083182812, 0.5132393276244402, 0.48357627378404144, 0.47624510475248094, 0.5292552447021007, 0.5305621960163116, 0.4725723287425936, 0.4878924035653472, 0.4904125231653452, 0.5032274387106299, 0.47158222571015357, 0.4531631511747837, 0.48311221682280303, 0.5060078546404838, 0.48435146336257456, 0.5409567308723926, 0.4567420070692897, 0.4623193391561508, 0.4825072743743658, 0.4845527048557997, 0.47439751113951206, 0.4572945342361927, 0.4623396928608418, 0.46393002089113, 0.4915669990628958, 0.5063451097272338, 0.4785871240794659, 0.4636869314834476, 0.4507933519780636, 0.4380083803758025, 0.4489776850789785, 0.4674644271954894, 0.509185853473842, 0.48352455613017076, 0.5095404759347439, 0.610230032503605, 0.4662731768861413, 0.4730758576318622, 0.48341961322724825, 0.4860790433138609, 0.46840287341177467, 0.5001271432191133, 0.5305484683215618, 0.4617945815399289, 0.46642581181228165, 0.5311559997946024, 0.45677104163914917, 0.48823492068797353, 0.5132545208930969, 0.45595704911649226, 0.5038564239367843, 0.5027258859947323, 0.4980815825462342, 0.5182936546057463, 0.5535126847475766, 0.4707693644203245, 0.44122700073570015, 0.46331854020804164, 0.4779908500313759, 0.47446214014291765, 0.5061822877563535, 0.4630262134373188, 0.4664954633265733, 0.4644186313264072, 0.5117025197669863, 0.5258631027042865, 0.4724766698926687, 0.5154936393871903, 0.44895674090832466, 0.4633070482164621, 0.5003406125605107, 0.4815749708041549, 0.48722364073246716, 0.4945867823511362, 0.5563696938306093, 0.4610647192522883, 0.49441255182772875, 0.46562350630760185, 0.4460823050700128, 0.46256216906011105, 0.4789765599220991, 0.5049631608277558, 0.4614690849483013, 0.4679197465628386] + +train_x = numpy.array(train_x) +train_cost = numpy.array(train_cost) + +train_x = train_x[::5][:150] +train_cost = sum(train_cost[i::5][:150] for i in range(5)) / 5. 
+ +print(train_x.shape) +print(train_cost.shape) + +#plt.plot(train_x, train_cost, label='train cost') +#plt.plot(valid_x, valid_cost, label='valid cost') +#plt.legend() +#plt.show() + +plt.plot(valid_x, valid_error_rate, label='valid error rate') +plt.legend() +plt.show()
diff --git a/Report/mila.png b/Report/mila.png new file mode 100644 index 0000000..5e6d2cf Binary files /dev/null and b/Report/mila.png differ
diff --git a/Report/pouet_timit_ER.png b/Report/pouet_timit_ER.png new file mode 100644 index 0000000..9dd3e16 Binary files /dev/null and b/Report/pouet_timit_ER.png differ
diff --git a/Report/pouet_timit_cost.png b/Report/pouet_timit_cost.png new file mode 100644 index 0000000..b94f906 Binary files /dev/null and b/Report/pouet_timit_cost.png differ -- cgit v1.2.3