@inproceedings{SarkarGepperthHandmannetal.2017,
  author    = {Ayanava Sarkar and Alexander Gepperth and Uwe Handmann and Thomas Kopinski},
  title     = {Dynamic Hand Gesture Recognition for Mobile Systems Using Deep LSTM},
  booktitle = {Intelligent Human Computer Interaction (IHCI 2017)},
  series    = {Lecture Notes in Computer Science},
  volume    = {10688},
  publisher = {Springer},
  isbn      = {978-3-319-72038-8},
  doi       = {10.1007/978-3-319-72038-8_3},
  pages     = {19--31},
  year      = {2017},
  abstract  = {We present a pipeline for recognizing dynamic freehand gestures on mobile devices, based on depth information extracted from a single Time-of-Flight sensor. Hand gestures are recorded with a mobile 3D sensor, transformed frame by frame into an appropriate 3D descriptor, and fed into a deep LSTM network for recognition. As a recurrent neural model, the LSTM is particularly suited to classifying explicitly time-dependent data such as hand gestures. For training and testing purposes, we create a small database of four hand gesture classes, each comprising 40 × 150 3D frames. We conduct experiments concerning execution speed on a mobile device, generalization capability as a function of network topology, and classification ability ‘ahead of time’, i.e., before the gesture is completed. Recognition rates are high (>95\%) and maintainable in real time, as a single classification step requires less than 1 ms of computation, making freehand gestures viable for mobile systems.},
  language  = {en}
}