@comment{
  Publication list, normalised for classic BibTeX:
  - brace-delimited field values, one field per line;
  - authors in unambiguous "Last, First" form, separated by " and ";
  - accents as BibTeX special characters (e.g. Kr{\"o}sl) so sorting/labels work;
  - volume and number stored as plain integers (no "Vol."/"Issue" prefixes);
  - case-sensitive coinages/acronyms protected with braces in titles.
  Citation keys are unchanged from the original file.
}

@article{luidolt-2020-lightperceptionVR,
  author   = {Luidolt, Laura R. and Wimmer, Michael and Kr{\"o}sl, Katharina},
  title    = {Gaze-Dependent Simulation of Light Perception in Virtual Reality},
  journal  = {IEEE Transactions on Visualization and Computer Graphics},
  year     = {2020},
  month    = dec,
  volume   = {26},
  number   = {12},
  pages    = {3557--3567},
  issn     = {1077-2626},
  doi      = {10.1109/TVCG.2020.3023604},
  url      = {https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/},
  keywords = {perception, virtual reality, user studies},
  abstract = {The perception of light is inherently different inside a virtual reality (VR) or augmented reality (AR) simulation when compared to the real world. Conventional head-worn displays (HWDs) are not able to display the same high dynamic range of brightness and color as the human eye can perceive in the real world. To mimic the perception of real-world scenes in virtual scenes, it is crucial to reproduce the effects of incident light on the human visual system. In order to advance virtual simulations towards perceptual realism, we present an eye-tracked VR/AR simulation comprising effects for gaze-dependent temporal eye adaption, perceptual glare, visual acuity reduction, and scotopic color vision. Our simulation is based on medical expert knowledge and medical studies of the healthy human eye. We conducted the first user study comparing the perception of light in a real-world low-light scene to a VR simulation. Our results show that the proposed combination of simulated visual effects is well received by users and also indicate that an individual adaptation is necessary, because perception of light is highly subjective.},
}

@inproceedings{Kroesl_2020_11_09,
  author    = {Kr{\"o}sl, Katharina and Elvezio, Carmine and Luidolt, Laura R. and H{\"u}rbe, Matthias and Karst, Sonja and Feiner, Steven and Wimmer, Michael},
  title     = {{CatARact}: Simulating Cataracts in Augmented Reality},
  booktitle = {{IEEE} International Symposium on Mixed and Augmented Reality ({ISMAR})},
  year      = {2020},
  month     = nov,
  pages     = {1--10},
  url       = {https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/},
  abstract  = {For our society to be more inclusive and accessible, the more than 2.2 billion people worldwide with limited vision should be considered more frequently in design decisions, such as architectural planning. To help architects in evaluating their designs and give medical personnel some insight on how patients experience cataracts, we worked with ophthalmologists to develop the first medically-informed, pilot-studied simulation of cataracts in eye-tracked augmented reality (AR). To test our methodology and simulation, we conducted a pilot study with cataract patients between surgeries of their two cataract-affected eyes. Participants compared the vision of their corrected eye, viewing through simulated cataracts, to that of their still affected eye, viewing an unmodified AR view. In addition, we conducted remote experiments via video call, live adjusting our simulation and comparing it to related work, with participants who had cataract surgery a few months before. We present our findings and insights from these experiments and outline avenues for future work.},
}

@mastersthesis{Luidolt-2020-DA,
  author   = {Luidolt, Laura R.},
  title    = {Perception of Light in Virtual Reality},
  school   = {Research Unit of Computer Graphics, Institute of Visual Computing and Human-Centered Technology, Faculty of Informatics, TU Wien},
  year     = {2020},
  month    = feb,
  address  = {Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria},
  url      = {https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/},
  keywords = {perception, temporal eye adaptation, glare, virtual reality, scotopic vision, visual acuity reduction, augmented reality},
  abstract = {The perception of light and light incidence in the human eye is substantially different in real-world scenarios and virtual reality (VR) simulations. Standard low dynamic range displays, as used in common VR headsets, are not able to replicate the same light intensities we see in reality. Therefore, light phenomenons, such as temporal eye adaptation, perceptual glare, visual acuity reduction and scotopic color vision need to be simulated to generate realistic images. Even though, a physically based simulation of these effects could increase the perceived reality of VR applications, this topic has not been thoroughly researched yet. We propose a post-processing workflow for VR and augmented reality (AR), using eye tracking, that is based on medical studies of the healthy human eye and is able to run in real time, to simulate light effects as close to reality as possible. We improve an existing temporal eye adaptation algorithm to be view-dependent. We adapt a medically based glare simulation to run in VR and AR. Additionally, we add eye tracking to adjust the glare intensity according to the viewing direction and the glare appearance depending on the user's pupil size. We propose a new function fit for the reduction of visual acuity in VR head mounted displays. Finally, we include scotopic color vision for more realistic rendering of low-light scenes. We conducted a primarily qualitative pilot study, comparing a real-world low-light scene to our VR simulation through individual, perceptual evaluation. Most participants mentioned, that the simulation of temporal eye adaptation, visual acuity reduction and scotopic color vision was similar or the same as their own perception in the real world. However, further work is necessary to improve the appearance and movement of our proposed glare kernel. We conclude, that our work has laid a ground base for further research regarding the simulation and individual adaptation to the perception of light in VR.},
}

@article{Reichinger-2018-TAC,
  author   = {Reichinger, Andreas and Carrizosa, H. G. and Wood, J. and Schr{\"o}der, S. and L{\"o}w, C. and Luidolt, Laura R. and Schimkowitsch, Maria and Fuhrmann, Anton and Maierhofer, Stefan and Purgathofer, Werner},
  title    = {Pictures in Your Mind: Using Interactive Gesture-Controlled Reliefs to Explore Art},
  journal  = {ACM Transactions on Accessible Computing},
  year     = {2018},
  month    = mar,
  number   = {2},
  url      = {https://www.cg.tuwien.ac.at/research/publications/2018/Reichinger-2018-TAC/},
  abstract = {Tactile reliefs offer many benefits over the more classic raised line drawings or tactile diagrams, as depth, 3D shape, and surface textures are directly perceivable. Although often created for blind and visually impaired (BVI) people, a wider range of people may benefit from such multimodal material. However, some reliefs are still difficult to understand without proper guidance or accompanying verbal descriptions, hindering autonomous exploration. In this work, we present a gesture-controlled interactive audio guide (IAG) based on recent low-cost depth cameras that can be operated directly with the hands on relief surfaces during tactile exploration. The interactively explorable, location-dependent verbal and captioned descriptions promise rapid tactile accessibility to 2.5D spatial information in a home or education setting, to online resources, or as a kiosk installation at public places. We present a working prototype, discuss design decisions, and present the results of two evaluation studies: the first with 13 BVI test users and the second follow-up study with 14 test users across a wide range of people with differences and difficulties associated with perception, memory, cognition, and communication. The participant-led research method of this latter study prompted new, significant and innovative developments.},
}