@inproceedings{VolkmarFrinken2012,
  author    = {Frinken, Volkmar and Zamora, Francisco and Espa{\~n}a, Salvador and Castro, Maria Jose and Fischer, Andreas and Bunke, Horst},
  title     = {{Long-Short Term Memory} Neural Networks Language Modeling for Handwriting Recognition},
  booktitle = {21st International Conference on Pattern Recognition},
  year      = {2012},
  pages     = {701--704},
  isbn      = {978-1-4673-2216-4},
  issn      = {1051-4651},
  abstract  = {Unconstrained handwritten text recognition systems maximize the combination of two separate probability scores. The first one is the observation probability that indicates how well the returned word sequence matches the input image. The second score is the probability that reflects how likely a word sequence is according to a language model. Current state-of-the-art recognition systems use statistical language models in form of bigram word probabilities. This paper proposes to model the target language by means of a recurrent neural network with long-short term memory cells. Because the network is recurrent, the considered context is not limited to a fixed size especially as the memory cells are designed to deal with long-term dependencies. In a set of experiments conducted on the IAM off-line database we show the superiority of the proposed language model over statistical n-gram models.},
  optnote   = {DAG; exported from refbase (http://refbase.cvc.uab.es/show.php?record=2052), last updated on Thu, 13 Mar 2014 10:20:11 +0100},
  file      = {:http://refbase.cvc.uab.es/files/FZE2012.pdf:PDF},
}