@inproceedings{harrison2011omnitouch,
  author    = {Harrison, Chris and Benko, Hrvoje and Wilson, Andy},
  title     = {{OmniTouch}: Wearable Multitouch Interaction Everywhere},
  booktitle = {Proceedings of the 24th Annual {ACM} Symposium on User Interface Software and Technology ({UIST} '11)},
  year      = {2011},
  month     = oct,
  pages     = {441--450},
  publisher = {ACM},
  url       = {http://approjects.co.za/?big=en-us/research/publication/omnitouch-wearable-multitouch-interaction-everywhere/},
  abstract  = {OmniTouch is a wearable depth-sensing and projection system that enables interactive multitouch applications on everyday surfaces. Beyond the shoulder-worn system, there is no instrumentation of the user or environment. Foremost, the system allows the wearer to use their hands, arms and legs as graphical, interactive surfaces. Users can also transiently appropriate surfaces from the environment to expand the interactive area (e.g., books, walls, tables). On such surfaces - without any calibration - OmniTouch provides capabilities similar to that of a mouse or touchscreen: X and Y location in 2D interfaces and whether fingers are ``clicked'' or hovering, enabling a wide variety of interactions. Reliable operation on the hands, for example, requires buttons to be 2.3cm in diameter. Thus, it is now conceivable that anything one can do on today's mobile devices, they could do in the palm of their hand.},
}