@article{hinckley2009synchronous,
  author   = {Hinckley, Ken and Wilson, Andy and Sarin, Raman},
  title    = {Synchronous Gestures in Multi-Display Environments},
  journal  = {Human-Computer Interaction},
  volume   = {24},
  pages    = {117--169},
  year     = {2009},
  month    = {April},
  url      = {http://approjects.co.za/?big=en-us/research/publication/synchronous-gestures-multi-display-environments/},
  abstract = {Synchronous gestures are patterns of activity by one or more users, sensed across a distributed system, that take on new meaning when they occur together in time. Synchronous gestures draw inspiration from real-world social rituals such as toasting by tapping two drinking glasses together. In this article, we explore several interactions based on synchronous gestures, including bumping devices together, drawing corresponding pen gestures on touch-sensitive displays, simultaneously pressing a button on multiple smart-phones, and placing one or more devices on the sensing surface of a tabletop computer. These interactions focus on wireless composition of physically colocated devices, where users perceive one another and coordinate their actions through social protocol. We demonstrate how synchronous gestures may be phrased together with surrounding interactions. Such connection-action phrases afford a rich syntax of cross-device commands, operands, and one-to-one or one-to-many associations with a flexible physical arrangement of devices. Synchronous gestures enable colocated users to combine multiple devices into a heterogeneous display environment, where the users may establish a transient network connection with selected colocated users to facilitate the pooling of input capabilities, display resources, and the digital contents of each device. For example, participants at a meeting may bring mobile devices, including tablet computers, PDAs, and smart-phones, and the meeting-room infrastructure may include fixed interactive displays, such as a tabletop computer. Our techniques facilitate the creation of an ad hoc display environment for tasks such as viewing a large document across multiple devices, presenting information to another user, or offering files to others. The interactions necessary to establish such ad hoc display environments must be rapid and minimally demanding of attention: during face-to-face communication, a pause of even 5 seconds is socially awkward and disrupts collaboration. Current devices may associate using a direct transport such as Infrared Data Association (IrDA) ports or the emerging Near Field Communication (NFC) standard. However, such transports support only one-to-one associations between devices and require close physical proximity as well as a specific relative orientation to connect the devices (e.g., the devices may be linked when touching head-to-head but not side-to-side). By contrast, sociological research in proxemics (the study of how people use the “personal space” surrounding their bodies) demonstrates that people carefully select physical distance as well as relative body orientation to suit the task, mood, and their social relationship with others. Wireless networking can free device-to-device connections from the limitations of direct transports, but it results in a potentially large number of candidate devices. Synchronous gestures address these problems by allowing users to naturally express a spontaneous wireless connection between specific proximal (colocated) interactive displays.},
}