@article{andrist2022developing,
  author   = {Andrist, Sean and Bohus, Dan and Feniello, Ashley and Saw, Nick},
  title    = {Developing Mixed Reality Applications with Platform for Situated Intelligence},
  year     = {2022},
  month    = {February},
  abstract = {Both industry and academic interest in mixed reality has skyrocketed in recent years. New headset devices for both virtual and augmented reality are increasingly available and affordable, and new APIs, tools, and frameworks enable developers and researchers to more easily create mixed reality applications. While many tools aim to make it easier to create and interact with content rendered to the headset, these new devices are interesting not just from an output but also from an input perspective: they contain powerful multimodal sensors that provide unique opportunities to drive forward research on egocentric perception and interaction. In this paper, we introduce Platform for Situated Intelligence, an existing open-source framework, to the mixed reality community. The framework was designed to help developers and researchers create and study real-time, interactive AI systems that process multimodal streaming data. Recent extensions to the framework include new capabilities and components designed specifically to support mixed reality sensory streams and scenarios.},
  url      = {http://approjects.co.za/?big=en-us/research/publication/developing-mixed-reality-applications-with-platform-for-situated-intelligence/},
  pages    = {48--50},
  journal  = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
}