@techreport{patil2018gesturepod,
  author      = {Patil, Shishir and Dennis, Don Kurian and Pabbaraju, Chirag and Shaheer, Nadeem and Simhadri, Harsha and Seshadri, Vivek and Varma, Manik and Jain, Prateek},
  title       = {{GesturePod}: Gesture-based Interaction Cane for People with Visual Impairments},
  institution = {Microsoft},
  year        = {2018},
  month       = may,
  number      = {MSR-TR-2018-14},
  abstract    = {People using white canes for navigation face challenges concurrently accessing other devices, e.g., smartphones. Building on recent research on abandonment of specialized devices, we explore a new touch free mode of interaction, wherein a person with visual impairment performs gestures on their existing white cane to trigger tasks on their smartphone. We present an easy-to-integrate GesturePod, that clips on to any white cane and enables the detection of gestures performed with the cane. GesturePod, thereby, helps manage a smartphone without touch, or removing the phone from a pocket or bag. In this paper, we present design decisions and challenges in building the pod. We propose a novel, efficient machine learning pipeline to train and deploy the model. Our in-lab study shows that GesturePod achieves >92\% gesture recognition accuracy and can significantly reduce the time taken for common smartphone tasks. Our in-wild study suggests that GesturePod is a promising interaction tool for smartphone, especially in constrained outdoor scenarios.},
  url         = {https://www.microsoft.com/en-us/research/publication/gesturepod-programmable-gesture-recognition-augmenting-assistive-devices/},
}