@inproceedings{panfili-2021-myop, title = "Myopia in Head-Worn Virtual Reality", author = "Lara Panfili and Michael Wimmer and Katharina Kr\"{o}sl", year = "2021", abstract = "In this work, we investigate the influence of myopia on the perceived visual acuity (VA) in head-worn virtual reality (VR). Factors such as display resolution or vision capabilities of users influence the VA in VR. We simulated eyesight tests in VR and on a desktop screen and conducted a user study comparing VA measurements of participants with normal sight and participants with myopia. Surprisingly, our results suggest that people with severe myopia can see better in VR than in the real world, while the VA of people with normal or corrected sight or mild myopia is reduced in VR.", month = mar, booktitle = "2021 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", doi = "10.1109/VRW52623.2021.00197", isbn = "978-1-6654-1166-0", location = "online", publisher = "IEEE Computer Society Press", pages = "629--630", keywords = "visual impairments", URL = "https://www.cg.tuwien.ac.at/research/publications/2021/panfili-2021-myop/", } @article{luidolt-2020-lightperceptionVR, title = "Gaze-Dependent Simulation of Light Perception in Virtual Reality", author = "Laura R. Luidolt and Michael Wimmer and Katharina Kr\"{o}sl", year = "2020", abstract = "The perception of light is inherently different inside a virtual reality (VR) or augmented reality (AR) simulation when compared to the real world. Conventional head-worn displays (HWDs) are not able to display the same high dynamic range of brightness and color as the human eye can perceive in the real world. To mimic the perception of real-world scenes in virtual scenes, it is crucial to reproduce the effects of incident light on the human visual system. In order to advance virtual simulations towards perceptual realism, we present an eye-tracked VR/AR simulation comprising effects for gaze-dependent temporal eye adaptation, perceptual glare, visual acuity reduction, and scotopic color vision. Our simulation is based on medical expert knowledge and medical studies of the healthy human eye. We conducted the first user study comparing the perception of light in a real-world low-light scene to a VR simulation. Our results show that the proposed combination of simulated visual effects is well received by users and also indicate that an individual adaptation is necessary, because perception of light is highly subjective.", month = dec, journal = "IEEE Transactions on Visualization and Computer Graphics", volume = "26", number = "12", issn = "1077-2626", doi = "10.1109/TVCG.2020.3023604", pages = "3557--3567", keywords = "perception, virtual reality, user studies", URL = "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/", } @phdthesis{KROESL-2020-SVI, title = "Simulating Vision Impairments in Virtual and Augmented Reality", author = "Katharina Kr\"{o}sl", year = "2020", abstract = "There are at least 2.2 billion people affected by vision impairments worldwide, and the number of people suffering from common eye diseases like cataracts, diabetic retinopathy, glaucoma or macular degeneration, which show a higher prevalence with age, is expected to rise in the years to come, due to factors like aging of the population.
Medical publications, ophthalmologists and patients can give some insight into the effects of vision impairments, but for people with normal eyesight (even medical personnel) it is often hard to grasp how certain eye diseases can affect perception. We need to understand and quantify the effects of vision impairments on perception, to design cities, buildings, or lighting systems that are accessible to people with vision impairments. Conducting studies on vision impairments in the real world is challenging, because it requires a large number of participants with exactly the same type of impairment. Such a sample group is often hard or even impossible to find, since not every symptom can be assessed precisely and the same eye disease can be experienced very differently by affected people. In this thesis, we address these issues by presenting a system and a methodology to simulate vision impairments, such as refractive errors, cataracts, cornea disease, and age-related macular degeneration in virtual reality (VR) and augmented reality (AR), which allows us to conduct user studies in VR or AR with people with healthy eyesight and graphically simulated vision impairments. We present a calibration technique that allows us to calibrate individual simulated symptoms to the same level of severity for every user, taking hardware constraints as well as vision capabilities of users into account. We measured the influence of simulated reduced visual acuity on maximum recognition distances of signage in a VR study and showed that current international standards and norms do not sufficiently consider people with vision impairments. In a second study, featuring our medically based cataract simulations in VR, we found that different lighting systems can positively or negatively affect the perception of people with cataracts. We improved and extended our cataract simulation to video see-through AR and evaluated and adjusted each simulated symptom together with cataract patients in a pilot study, showing the flexibility and potential of our approach. In future work, we plan to include further vision impairments and open-source our software, so it can be used by architects and lighting designers to test their designs for accessibility, for training of medical personnel, and to increase empathy for people with vision impairments. This way, we hope to contribute to making this world more inclusive for everyone.", month = nov, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Research Unit of Computer Graphics, Institute of Visual Computing and Human-Centered Technology, Faculty of Informatics, TU Wien", URL = "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/", } @inproceedings{Kroesl_2020_11_09, title = "CatARact: Simulating Cataracts in Augmented Reality", author = "Katharina Kr\"{o}sl and Carmine Elvezio and Laura R. Luidolt and Matthias H\"{u}rbe and Sonja Karst and Steven Feiner and Michael Wimmer", year = "2020", abstract = "For our society to be more inclusive and accessible, the more than 2.2 billion people worldwide with limited vision should be considered more frequently in design decisions, such as architectural planning. To help architects in evaluating their designs and give medical personnel some insight into how patients experience cataracts, we worked with ophthalmologists to develop the first medically informed, pilot-studied simulation of cataracts in eye-tracked augmented reality (AR).
To test our methodology and simulation, we conducted a pilot study with cataract patients between surgeries of their two cataract-affected eyes. Participants compared the vision of their corrected eye, viewing through simulated cataracts, to that of their still affected eye, viewing an unmodified AR view. In addition, we conducted remote experiments via video call, live adjusting our simulation and comparing it to related work, with participants who had cataract surgery a few months before. We present our findings and insights from these experiments and outline avenues for future work.", month = nov, event = "IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", booktitle = "IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", pages = "1--10", URL = "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/", } @inproceedings{kroesl-2020-XREye, title = "XREye: Simulating Visual Impairments in Eye-Tracked XR", author = "Katharina Kr\"{o}sl and Carmine Elvezio and Matthias H\"{u}rbe and Sonja Karst and Steven Feiner and Michael Wimmer", year = "2020", abstract = "Many people suffer from visual impairments, which can be difficult for patients to describe and others to visualize. To aid in understanding what people with visual impairments experience, we demonstrate a set of medically informed simulations in eye-tracked XR of several common conditions that affect visual perception: refractive errors (myopia, hyperopia, and presbyopia), cornea disease, and age-related macular degeneration (wet and dry).", month = mar, booktitle = "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)", location = "(Atlanta) online", publisher = "IEEE", URL = "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/", } @misc{kroesl-2019-MoD, title = "Master of Disaster: Virtual-Reality Response Training in Disaster Management", author = "Katharina Kr\"{o}sl and Harald Steinlechner and Johanna Donabauer and Daniel Cornel and J\"{u}rgen Waser", year = "2019", abstract = "To be prepared for flooding events, disaster response personnel have to be trained to execute developed action plans. We present a flood response training system which connects an interactive flood simulation with a VR client. The collaborative operator-trainee setup of our system allows a trainee to steer the remote simulation from within the virtual environment, evaluate the effectiveness of deployed protection measures, and compare the results across multiple simulation runs. An operator supervises the trainee's actions from a linked desktop application to provide assistance in complex tasks. The versatility of our system is demonstrated on four different city models.", month = nov, journal = "Proceedings of VRCAI '19, the 17th International Conference on Virtual-Reality Continuum and its Applications in Industry, Article No. 49", location = "Brisbane, Australia", isbn = "978-1-4503-7002-8", event = "VRCAI 2019", doi = "10.1145/3359997.3365729", Conference date = "Poster presented at VRCAI 2019 (2019-11-14--2019-11-16)", keywords = "virtual reality, flood simulation, disaster training", URL = "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/", } @inproceedings{kroesl-2019-ThesisFF, title = "Simulating Vision Impairments in VR and AR", author = "Katharina Kr\"{o}sl", year = "2019", abstract = "1.3 billion people worldwide are affected by vision impairments, according to the World Health Organization.
However, vision impairments are hardly ever taken into account when we design our cities, buildings, emergency signposting, or lighting systems. With this research, we want to develop realistic, medically based simulations of eye diseases in VR and AR, which allow calibrating vision impairments to the same level for different users. This allows us to conduct user studies with participants with normal sight and graphically simulated vision impairments, to determine the effects of these impairments on perception, and to investigate lighting concepts under impaired vision conditions. This thesis will, for the first time, provide methods for architects and designers to evaluate their designs for accessibility and to develop lighting systems that can enhance the perception of people with vision impairments.", month = jun, booktitle = "ACM SIGGRAPH THESIS FAST FORWARD 2019", keywords = "vision impairments, cataracts, virtual reality, augmented reality, user study", URL = "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-ThesisFF/", } @inproceedings{kroesl-2019-ICthroughVR, title = "ICthroughVR: Illuminating Cataracts through Virtual Reality", author = "Katharina Kr\"{o}sl and Carmine Elvezio and Matthias H\"{u}rbe and Sonja Karst and Michael Wimmer and Steven Feiner", year = "2019", abstract = "Vision impairments, such as cataracts, affect the way many people interact with their environment, yet are rarely considered by architects and lighting designers because of a lack of design tools. To address this, we present a method to simulate vision impairments caused by cataracts in virtual reality (VR), using eye tracking for gaze-dependent effects. We conducted a user study to investigate how lighting affects visual perception for users with cataracts. Unlike past approaches, we account for the user's vision and some constraints of VR headsets, allowing for calibration of our simulation to the same level of degraded vision for all participants.", month = mar, publisher = "IEEE", location = "Osaka, Japan", event = "IEEE VR 2019, the 26th IEEE Conference on Virtual Reality and 3D User Interfaces", doi = "10.1109/VR.2019.8798239", booktitle = "2019 IEEE Conference on Virtual Reality and 3D User Interfaces", pages = "655--663", keywords = "vision impairments, cataracts, virtual reality, user study", URL = "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-ICthroughVR/", } @inproceedings{schuetz-2019-CLOD, title = "Real-Time Continuous Level of Detail Rendering of Point Clouds", author = "Markus Sch\"{u}tz and Katharina Kr\"{o}sl and Michael Wimmer", year = "2019", abstract = "Real-time rendering of large point clouds requires acceleration structures that reduce the number of points drawn on screen. State-of-the-art algorithms group and render points in hierarchically organized chunks with varying extent and density, which results in sudden changes of density from one level of detail to another, as well as noticeable popping artifacts when additional chunks are blended in or out. These popping artifacts are especially noticeable at lower levels of detail, and consequently in virtual reality, where high performance requirements impose a reduction in detail. We propose a continuous level-of-detail method that exhibits gradual rather than sudden changes in density.
Our method continuously recreates a down-sampled vertex buffer from the full point cloud, based on camera orientation, position, and distance to the camera, in a point-wise rather than chunk-wise fashion and at speeds up to 17 million points per millisecond. As a result, additional details are blended in or out in a less noticeable and significantly less irritating manner compared to the state of the art. The improved acceptance of our method was successfully evaluated in a user study.", month = mar, publisher = "IEEE", location = "Osaka, Japan", event = "IEEE VR 2019, the 26th IEEE Conference on Virtual Reality and 3D User Interfaces", doi = "10.1109/VR.2019.8798284", booktitle = "2019 IEEE Conference on Virtual Reality and 3D User Interfaces", pages = "103--110", keywords = "point clouds, virtual reality, VR", URL = "https://www.cg.tuwien.ac.at/research/publications/2019/schuetz-2019-CLOD/", } @inproceedings{kroesl-2018-DC, title = "[DC] Computational Design of Smart Lighting Systems for Visually Impaired People, using VR and AR Simulations", author = "Katharina Kr\"{o}sl", year = "2018", abstract = "This Doctoral Consortium paper presents my dissertation research in a multidisciplinary setting, spanning from architecture, specifically lighting design and building information modeling, to virtual reality (VR) and perception. Since vision impairments are hardly taken into account in architecture and lighting design today, this research aims to provide the necessary tools to quantify the effects of vision impairments, so that design guidelines regarding these impairments can be developed. Another research goal is the determination of the influence of different lighting conditions on the perception of people with vision impairments. This would allow us to develop smart lighting systems that can aid visually impaired people by enhancing their visual perception of their environment. This paper also outlines the concept for a tool to automatically generate lighting solutions and compare and test them in VR, as a design aid for architects and lighting designers.", month = oct, publisher = "IEEE", location = "Munich", event = "ISMAR 2018", booktitle = "Proceedings of the 2018 IEEE International Symposium on Mixed and Augmented Reality (ISMAR-Adjunct)", keywords = "vision impairments, lighting design, virtual reality, user study", URL = "https://www.cg.tuwien.ac.at/research/publications/2018/kroesl-2018-DC/", } @misc{kroesl-2018-TVS, title = "The Virtual Schoolyard: Attention Training in Virtual Reality for Children with Attentional Disorders", author = "Katharina Kr\"{o}sl and Anna Felnhofer and Johanna X. Kafka and Laura Schuster and Alexandra Rinnerthaler and Michael Wimmer and Oswald D. Kothgassner", year = "2018", abstract = "This work presents a virtual reality simulation for training different attentional abilities in children and adolescents. In an interdisciplinary project between psychology and computer science, we developed four mini-games that are used during therapy sessions to address different aspects of attentional disorders. First experiments show that the immersive game-like application is well received by children.
Our tool is also currently part of a treatment program in an ongoing clinical study.", month = aug, publisher = "ACM", location = "Vancouver, Canada", isbn = "978-1-4503-5817-0", event = "ACM SIGGRAPH 2018", doi = "10.1145/3230744.3230817", Conference date = "Poster presented at ACM SIGGRAPH 2018 (2018-08-12--2018-08-16)", pages = "Article 27", keywords = "virtual reality, attentional disorders, user study", URL = "https://www.cg.tuwien.ac.at/research/publications/2018/kroesl-2018-TVS/", } @article{Kathi-2018-VRB, title = "A VR-based user study on the effects of vision impairments on recognition distances of escape-route signs in buildings", author = "Katharina Kr\"{o}sl and Dominik Bauer and Michael Schw\"{a}rzler and Henry Fuchs and Michael Wimmer and Georg Suter", year = "2018", abstract = "In workplaces or publicly accessible buildings, escape routes are signposted according to official norms or international standards that specify distances, angles and areas of interest for the positioning of escape-route signs. In homes for the elderly, in which the residents commonly have degraded mobility and suffer from vision impairments caused by age or eye diseases, the specifications of current norms and standards may be insufficient. Quantifying the effect of symptoms of vision impairments like reduced visual acuity on recognition distances is challenging, as it is cumbersome to find a large number of user study participants who suffer from exactly the same form of vision impairment. Hence, we propose a new methodology for such user studies: by conducting a user study in virtual reality (VR), we are able to use participants with normal or corrected sight and simulate vision impairments graphically. The use of standardized medical eyesight tests in VR allows us to calibrate the visual acuity of all our participants to the same level, taking their respective visual acuity into account. Since we primarily focus on homes for the elderly, we accounted for their often limited mobility by implementing a wheelchair simulation for our VR application.", month = apr, journal = "The Visual Computer", volume = "34", number = "6-8", issn = "0178-2789", doi = "10.1007/s00371-018-1517-7", pages = "911--923", URL = "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/", } @inproceedings{PB-VRVis-2018-005, title = "An Automated Verification Workflow for Planned Lighting Setups using BIM", author = "Andreas Walch and Katharina Kr\"{o}sl and Christian Luksch and David Pichler and Thomas Pipp and Michael Schw\"{a}rzler", year = "2018", abstract = "The use of Building Information Modeling (BIM) methods is becoming more and more established in the planning stage, during construction, and for the management of buildings. Tailored BIM software packages make it possible to handle a vast number of relevant aspects, but have so far not covered specialized tasks such as the evaluation of light distributions in and around a 3D model of a building. To overcome this limitation, we demonstrate the use of the open-source IFC format for preparing and exchanging BIM data to be used in our interactive light simulation system. By exploiting the availability of 3D data and semantic descriptions, it is possible to automatically place measurement surfaces in the 3D scene, and evaluate the suitability and sustainability of a planned lighting design according to given constraints and industry norms.
Interactive visualizations for fast analysis of the simulation results, created using state-of-the-art web technologies, are seamlessly integrated into the 3D work environment, helping the lighting designer to quickly improve the initial lighting solution with a few clicks.", month = apr, isbn = "978-3-9504173-5-7", series = "REAL CORP", event = "REAL CORP 2018", editor = "M. Schrenk and V. V. Popovich and P. Zeile and P. Elisei and C. Beyer and G. Navratil", booktitle = "REAL CORP 2018, Proceedings", pages = "55--65", URL = "https://www.cg.tuwien.ac.at/research/publications/2018/PB-VRVis-2018-005/", } @xmascard{kroesl_x_card_2017, title = "X-Mas Card 2017", author = "Katharina Kr\"{o}sl and Markus Sch\"{u}tz", year = "2017", abstract = "This Christmas we want to illuminate your holidays with luminaires specifically designed for this occasion. The festive scene on this card features semi-translucent luminaires in the shape of Christmas trees. The light distribution of the corresponding luminaire model was simulated in LiteMaker, an interactive luminaire development tool that was developed at TU Wien and VRVis. LiteMaker provides interactive editing functionality and very fast high-quality previews of the final physically correct simulated light distribution of a luminaire model. The final scene and light distributions were rendered using our light-planning software HILITE.", month = dec, URL = "https://www.cg.tuwien.ac.at/research/publications/2017/kroesl_x_card_2017/", } @inproceedings{kroesl-2017-LiteMaker, title = "LiteMaker: Interactive Luminaire Development using Progressive Photon Tracing and Multi-Resolution Upsampling", author = "Katharina Kr\"{o}sl and Christian Luksch and Michael Schw\"{a}rzler and Michael Wimmer", year = "2017", abstract = "Industrial applications like luminaire development (the creation of a luminaire in terms of geometry and material) or lighting design (the efficient and aesthetic placement of luminaires in a virtual scene) rely heavily on high realism and physically correct simulations. Using typical approaches like CAD modeling and offline rendering, this requirement induces long processing times and therefore inflexible workflows. In this paper, we combine a GPU-based progressive photon-tracing algorithm to accurately simulate the light distribution of a luminaire with a novel multi-resolution image-filtering approach that produces visually meaningful intermediate results of the simulation process. By using this method in a 3D modeling environment, luminaire development is turned into an interactive process, allowing for real-time modifications and immediate feedback on the light distribution.
Since the simulation results converge to a physically plausible solution that can be imported as a representation of a luminaire into a light-planning software, our work contributes to combining the two formerly decoupled workflows of luminaire development and lighting design, reducing the overall production time and cost for luminaire manufacturers.", month = sep, isbn = "978-3-03868-049-9", publisher = "The Eurographics Association", location = "Bonn, Germany", event = "VMV 2017", editor = "Matthias Hullin and Reinhard Klein and Thomas Schultz and Angela Yao", doi = "10.2312/vmv.20171253", booktitle = "Vision, Modeling & Visualization", pages = "1--8", keywords = "Computing methodologies, Ray tracing, Image processing, Mesh geometry models", URL = "https://www.cg.tuwien.ac.at/research/publications/2017/kroesl-2017-LiteMaker/", } @mastersthesis{Kroesl_Katharina_2016_PPT, title = "Interactive, Progressive Photon Tracing using a Multi-Resolution Image-Filtering Approach", author = "Katharina Kr\"{o}sl", year = "2016", abstract = "Modern workflows in architectural planning and lighting design require physically reliable lighting simulations for detailed and complex 3D models. Current workflows for luminaire design and lighting design are not tailored to each other. During luminaire design, CAD programs are used to create 3D models of luminaires, and offline rendering tools are used to visualize the light distribution. In lighting design, light concepts are explored by placing light sources - previously created during luminaire design - in a 3D scene using an interactive light-planning software, but it is not possible to modify the light sources themselves. This thesis presents an interactive global-illumination algorithm to simulate the light distribution of a luminaire. The algorithm produces visually pleasing intermediate results at interactive frame rates, before converging to a physically plausible solution that can be imported as a representation of a light source into a light-planning software. We combine an interactive, progressive photon-tracing algorithm with a multi-resolution image-filtering approach. Our algorithm iteratively emits photons into a 3D scene containing the model of a luminaire and progressively refines results. We use mipmaps to create a multi-resolution approach and incorporate image-filtering techniques to obtain visually pleasing intermediate results. Evaluations based on objective quality metrics show that the presented image-filtering approach increases image quality when compared to non-filtered results. The proposed algorithm provides fast previews and allows interactive modifications of the geometry and material properties of the luminaire in real time. This reduces the time between modification iterations and therefore turns luminaire design into an interactive process that reduces overall production time. Furthermore, the presented approach integrates luminaire design into lighting design and therefore provides a new way to combine two formerly decoupled workflows.", month = mar, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology", keywords = "photon tracing, luminaire design, lighting design", URL = "https://www.cg.tuwien.ac.at/research/publications/2016/Kroesl_Katharina_2016_PPT/", }