[
    {
        "id": "huber-2025-eed",
        "type_id": "inproceedings",
        "tu_id": null,
        "repositum_id": "20.500.12708/220017",
        "title": "Enhancing Environmental Data Communication Through VR",
        "date": "2025-09-12",
        "abstract": "This paper examines how environmental data can be effectively communicated to non-experts in interactive, immersive 3D spaces. Utilizing a use case centered on global spatio-temporal CO2 emissions and population dynamics over the past six decades, we explore techniques for presenting both absolute (country-level) and per capita data within meaningful spatial and temporal contexts. We developed a VR prototype visualizing these data on an interactive 3D globe and conducted a qualitative user study to evaluate its clarity and interpretability for non-expert audiences. Our findings suggest that incorporating clear geographical context, intuitive representations, and user-centered interactions can enhance engagement and certain aspects of understanding. We thereby offer a practical contribution to tackling environmental data visualization and communication in immersive environments. This promotes transparency and mitigates the risk of misinterpretation or misinformation in data communication across emerging digital media platforms.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 3835,
            "image_height": 1551,
            "name": "huber-2025-eed-teaser.PNG",
            "type": "image/png",
            "size": 4027742,
            "path": "Publication:huber-2025-eed",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-teaser.PNG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": "date,pages_from,pages_to",
        "repositum_presentation_id": null,
        "authors": [
            5456,
            1653,
            1030,
            5485
        ],
        "ac_number": "AC17672916",
        "booktitle": "Computer Graphics & Visual Computing (CGVC) 2025 : Eurographics UK Chapter Proceedings : Liverpool John Moores University, UK : 11 - 12 September 2025",
        "date_from": "2025-09-11",
        "date_to": "2025-09-12",
        "doi": "10.2312/cgvc.20251220",
        "event": "Computer Graphics & Visual Computing 2025 (CGVC 2025)",
        "isbn": "978-3-03868-293-6",
        "lecturer": [
            5456
        ],
        "location": "Liverpool John Moores University",
        "open_access": "yes",
        "pages": "5",
        "pages_from": "1",
        "pages_to": "5",
        "publisher": "The Eurographics Association",
        "research_areas": [
            "InfoVis",
            "VR"
        ],
        "keywords": [
            "Human-centered computing",
            "User studies",
            "Virtual reality",
            "Information visualization"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "huber-2025-eed-paper.pdf",
                "type": "application/pdf",
                "size": 4585623,
                "path": "Publication:huber-2025-eed",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-paper:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 3835,
                "image_height": 1551,
                "name": "huber-2025-eed-teaser.PNG",
                "type": "image/png",
                "size": 4027742,
                "path": "Publication:huber-2025-eed",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-teaser.PNG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/huber-2025-eed-teaser:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "d10164"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-eed/",
        "__class": "Publication"
    },
    {
        "id": "gall-2025-marv",
        "type_id": "journalpaper_notalk",
        "tu_id": null,
        "repositum_id": "20.500.12708/223667",
        "title": "MARV: Multiview Augmented Reality Visualisation for Exploring Rich Material Data",
        "date": "2025-09",
        "abstract": "Rich material data is complex, large and heterogeneous, integrating primary and secondary non-destructive testing data for spatial, spatio-temporal, as well as high-dimensional data analyses. Currently, materials experts mainly rely on conventional desktop-based systems using 2D visualisation techniques, which render respective analyses a time-consuming and mentally demanding challenge. MARV is a novel immersive visual analytics system, which makes analyses of such data more effective and engaging in an augmented reality setting. For this purpose, MARV includes three newly designed visualisation techniques: MDD Glyphs with a Skewness Kurtosis Mapper, Temporal Evolution Tracker, and Chrono Bins, facilitating interactive exploration and comparison of multidimensional distributions of attribute data from multiple time steps. A qualitative evaluation conducted with materials experts in a real-world case study demonstrates the benefits of the proposed visualisation techniques. This evaluation revealed that combining spatial and abstract data in an immersive environment improves their analytical capabilities and facilitates the identification of patterns, anomalies, as well as changes over time.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": "projects",
        "repositum_presentation_id": null,
        "authors": [
            1355,
            1354,
            166,
            611
        ],
        "articleno": "e70150",
        "doi": "10.1111/cgf.70150",
        "issn": "1467-8659",
        "journal": "Computer Graphics Forum",
        "number": "6",
        "pages": "15",
        "publisher": "WILEY",
        "volume": "44",
        "research_areas": [
            "VisMat",
            "VR"
        ],
        "keywords": [
            "virtual environments",
            "augmented reality",
            "immersive analytics",
            "scientific visualisation",
            "visualization",
            "visual analytics"
        ],
        "weblinks": [],
        "files": [],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2025/gall-2025-marv/",
        "__class": "Publication"
    },
    {
        "id": "walchhofer_ar_flowmaps",
        "type_id": "bachelorthesis",
        "tu_id": null,
        "repositum_id": null,
        "title": "AR Visualization of Migration Flows in Europe",
        "date": "2025-03",
        "abstract": "Origin-Destination (OD) flow maps are a tool for visualizing movement patterns in domains such as transportation, trade, or migration. OD flow maps visualize movement using a network of directional and weighted curves. Traditional 2D OD flow maps often suffer from visual clutter and occlusion, limiting their  ffectiveness in conveying complex spatial relationships. This thesis explores the use of Augmented Reality (AR) for OD flow map visualization of migration data to enhance interactivity, data comprehensibility, and spatial awareness. By extending OD flow maps to the third dimension of time, users can interact with the data  dynamically and view OD connections from multiple perspectives across a span of years. This thesis develops an approach to visualize and encode the time component using a Space-Time Cube (STC), which encodes the time as\nan additional spatial dimension. The research involves the implementation of a force-directed layout algorithm based on work by Jenny et al. and the development of a marker-based AR phone application prototype capable of visualizing migration data for EU countries spanning from 2008-2022. This thesis contributes to the fields of data visualization, computer graphics, and human-computer  interaction, providing insights into how immersive technologies can enhance spatio-temporal data visualization",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "A 3D migration flow map of Central Europe with stacked flow lines representing individual years.",
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1318,
            "image_height": 977,
            "name": "walchhofer_ar_flowmaps-teaser.png",
            "type": "image/png",
            "size": 584452,
            "path": "Publication:walchhofer_ar_flowmaps",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-teaser.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            5452
        ],
        "date_end": "2025-03",
        "date_start": "2024-09",
        "matrikelnr": "12024752",
        "supervisor": [
            1813,
            1410
        ],
        "research_areas": [
            "InfoVis",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": "A 3D migration flow map of Central Europe with stacked flow lines representing individual years.",
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1318,
                "image_height": 977,
                "name": "walchhofer_ar_flowmaps-teaser.png",
                "type": "image/png",
                "size": 584452,
                "path": "Publication:walchhofer_ar_flowmaps",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-teaser.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-teaser:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "preview_image_width": 1318,
                "preview_image_height": 977,
                "name": "walchhofer_ar_flowmaps-thesis.pdf",
                "type": "application/pdf",
                "size": 26253749,
                "path": "Publication:walchhofer_ar_flowmaps",
                "preview_name": "walchhofer_ar_flowmaps-thesis:preview.png",
                "preview_type": "image/png",
                "preview_size": 584452,
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/walchhofer_ar_flowmaps-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2025/walchhofer_ar_flowmaps/",
        "__class": "Publication"
    },
    {
        "id": "huber-2025-esl",
        "type_id": "inproceedings",
        "tu_id": null,
        "repositum_id": "20.500.12708/213932",
        "title": "Exploring Seated Locomotion Techniques in Virtual Reality for People with Limited Mobility",
        "date": "2025",
        "abstract": "Virtual reality (VR) is often designed as a standing experience, excluding individuals with limited mobility. Given that a significant portion of the population experiences lower-body mobility restrictions, accessible VR locomotion must accommodate users without requiring lower-body movement. To build a comprehensive understanding of suitable locomotion techniques (LTs) for this demographic, it is crucial to evaluate the feasibility of various approaches in virtual environments (VEs). As a starting point, we present our evaluation approach and a user study on the feasibility and potential of selected LTs for accessible seated locomotion in VR. Our findings indicate that common LTs can be adapted for seated stationary VR. Teleportation-based techniques, in particular, stand out as viable options for accessible locomotion. Although our simulated wheelchair was less popular with non-disabled participants, it was well-received by wheelchair users and shows promise as an intuitive LT for (More)",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "Different locomotion paths",
            "filetitle": "image",
            "main_file": true,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1041,
            "image_height": 571,
            "name": "huber-2025-esl-image.png",
            "type": "image/png",
            "size": 823835,
            "path": "Publication:huber-2025-esl",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            5456,
            5457,
            1954,
            5458,
            5459,
            378,
            1030
        ],
        "booktitle": "Proceedings of the 20th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - GRAPP",
        "date_from": "2025-02-26",
        "date_to": "2025-02-28",
        "event": "20th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications",
        "isbn": "978-989-758-728-3",
        "lecturer": [
            5456
        ],
        "location": "Porto",
        "pages": "13",
        "pages_from": "161",
        "pages_to": "173",
        "publisher": "SciTePress",
        "volume": "1",
        "research_areas": [
            "Rendering",
            "VR"
        ],
        "keywords": [
            "Virtual Reality",
            "Accessibility",
            "Locomotion",
            "User Study"
        ],
        "weblinks": [],
        "files": [
            {
                "description": "Different locomotion paths",
                "filetitle": "image",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1041,
                "image_height": 571,
                "name": "huber-2025-esl-image.png",
                "type": "image/png",
                "size": 823835,
                "path": "Publication:huber-2025-esl",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "huber-2025-esl-paper.pdf",
                "type": "application/pdf",
                "size": 7212099,
                "path": "Publication:huber-2025-esl",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/huber-2025-esl-paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "d4314"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2025/huber-2025-esl/",
        "__class": "Publication"
    },
    {
        "id": "medeiros-2023-gwf",
        "type_id": "inproceedings",
        "tu_id": null,
        "repositum_id": "20.500.12708/193561",
        "title": "Going with the flow: using immersive analytics to support lifetime predictions of hydropower turbines",
        "date": "2023-10-13",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 536,
            "image_height": 320,
            "name": "medeiros-2023-gwf-teaser.png",
            "type": "image/png",
            "size": 116775,
            "path": "Publication:medeiros-2023-gwf",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-teaser.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            5326,
            5327,
            5328,
            5329,
            950
        ],
        "booktitle": "Proceedings SUI 2023 ACM : Symposium on Spatial User Interaction",
        "date_from": "2023-10-13",
        "date_to": "2023-10-15",
        "doi": "10.1145/3607822.3618009",
        "editor": "Huang, Tony and Sra, Misha and Argelaguet, Ferran and Lopes, Pedro and Barrera Machuca, Mayra Donaji",
        "event": "SUI '23: ACM Symposium on Spatial User Interaction",
        "isbn": "979-8-4007-0281-5",
        "lecturer": [
            5326
        ],
        "location": "Sydney",
        "open_access": "no",
        "pages": "2",
        "publisher": "Association for Computing Machinery",
        "research_areas": [
            "InfoVis",
            "VR"
        ],
        "keywords": [
            "Data visualization",
            "Virtual Reality",
            "Egocentric Navigation"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "medeiros-2023-gwf-paper.pdf",
                "type": "application/pdf",
                "size": 1351229,
                "path": "Publication:medeiros-2023-gwf",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-paper:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 536,
                "image_height": 320,
                "name": "medeiros-2023-gwf-teaser.png",
                "type": "image/png",
                "size": 116775,
                "path": "Publication:medeiros-2023-gwf",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-teaser.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/medeiros-2023-gwf-teaser:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2023/medeiros-2023-gwf/",
        "__class": "Publication"
    },
    {
        "id": "hladky-2022-QS",
        "type_id": "journalpaper",
        "tu_id": null,
        "repositum_id": "20.500.12708/152307",
        "title": "QuadStream: A Quad-Based Scene Streaming Architecture for Novel Viewpoint Reconstruction",
        "date": "2022-12",
        "abstract": "Cloud rendering is attractive when targeting thin client devices such as phones or VR/AR headsets, or any situation where a high-end GPU is not available due to thermal or power constraints. However, it introduces the challenge of streaming rendered data over a network in a manner that is robust to latency and potential dropouts. Current approaches range from streaming transmitted video and correcting it on the client---which fails in the presence of disocclusion events---to solutions where the server sends geometry and all rendering is performed on the client. To balance the competing goals of disocclusion robustness and minimal client workload, we introduce QuadStream, a new streaming technique that reduces motion-to-photon latency by allowing clients to render novel views on the fly and is robust against disocclusions. Our key idea is to transmit an approximate geometric scene representation to the client which is independent of the source geometry and can render both the current view frame and nearby adjacent views. Motivated by traditional macroblock approaches to video codec design, we decompose the scene seen from positions in a view cell into a series of view-aligned quads from multiple views, or QuadProxies. By operating on a rasterized G-Buffer, our approach is independent of the representation used for the scene itself. Our technical contributions are an efficient parallel quad generation, merging, and packing strategy for proxy views that cover potential client movement in a scene; a packing and encoding strategy allowing masked quads with depth information to be transmitted as a frame coherent stream; and an efficient rendering approach that takes advantage of modern hardware capabilities to turn our QuadStream representation into complete novel views on thin clients. 
According to our experiments, our approach achieves superior quality compared both to streaming methods that rely on simple video data and to geometry-based streaming.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 500,
            "image_height": 281,
            "name": "hladky-2022-QS-teaser.jpg",
            "type": "image/jpeg",
            "size": 176021,
            "path": "Publication:hladky-2022-QS",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2022/hladky-2022-QS/hladky-2022-QS-teaser.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2022/hladky-2022-QS/hladky-2022-QS-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": "date,location,lecturer,event,date_to,date_from",
        "repositum_presentation_id": null,
        "authors": [
            5184,
            5185,
            5186,
            1650,
            312,
            1662
        ],
        "cfp": {
            "name": "cfp.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "737990",
            "orig_name": "cfp.pdf",
            "ext": "pdf"
        },
        "date_from": "2022-12-06",
        "date_to": "2022-12-09",
        "event": "Siggraph Asia",
        "issn": "1557-7368",
        "journal": "ACM Transactions on Graphics",
        "lecturer": [
            5184
        ],
        "location": "Daegu",
        "number": "6",
        "publisher": "ASSOC COMPUTING MACHINERY",
        "volume": "41",
        "research_areas": [
            "Geometry",
            "Rendering",
            "VR"
        ],
        "keywords": [
            "streaming",
            "real-time rendering",
            "virtual reality"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 500,
                "image_height": 281,
                "name": "hladky-2022-QS-teaser.jpg",
                "type": "image/jpeg",
                "size": 176021,
                "path": "Publication:hladky-2022-QS",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2022/hladky-2022-QS/hladky-2022-QS-teaser.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2022/hladky-2022-QS/hladky-2022-QS-teaser:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2022/hladky-2022-QS/",
        "__class": "Publication"
    },
    {
        "id": "podkosova_2022_bimflexi-vr",
        "type_id": "journalpaper_notalk",
        "tu_id": null,
        "repositum_id": null,
        "title": "BIMFlexi-VR: A Virtual Reality Framework for Early-Stage Collaboration in Flexible Industrial Building Design ",
        "date": "2022-02-25",
        "abstract": "Integrated industrial building design is an interdisciplinary task, in which planning of flexible building structures requires effective communication and collaboration between all stakeholders already in early design stage. This paper presents BIMFlexi-VR, a collaborative framework which implements a real-time bidirectional link between a parametric modelling component created in Grasshopper for Rhinoceros that performs optimized structural calculations of an industrial building, and an immersive Virtual Reality environment in which the automatically calculated building is visualized. Users of BIMFlexi-VR are able to change parameters defining the outcome of the structural calculation directly inside the virtual environment and see the modified building design together with the associated fitness metrics in a matter of seconds. Providing an efficient and intuitive platform for early exploration of industrial building designs, BIMFlexi-VR enables collaborative decision making and facilitates the creation of more efficient and sustainable industrial constructions.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 365,
            "image_height": 329,
            "name": "podkosova_2022_bimflexi-vr-.JPG",
            "type": "image/jpeg",
            "size": 32979,
            "path": "Publication:podkosova_2022_bimflexi-vr",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2022/podkosova_2022_bimflexi-vr/podkosova_2022_bimflexi-vr-.JPG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2022/podkosova_2022_bimflexi-vr/podkosova_2022_bimflexi-vr-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1945,
            1874,
            378,
            1799
        ],
        "doi": "10.3389/frvir.2022.782169",
        "first_published": "2022-02",
        "issn": "2673-4192",
        "journal": "Frontiers in Virtual Reality",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "13",
        "volume": "3",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Virtual Reality in Industry 4.0",
            "Collaborative BIM",
            "VR in AEC"
        ],
        "weblinks": [
            {
                "href": "https://www.frontiersin.org/article/10.3389/frvir.2022.782169",
                "caption": "BimFlexi-VR",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 365,
                "image_height": 329,
                "name": "podkosova_2022_bimflexi-vr-.JPG",
                "type": "image/jpeg",
                "size": 32979,
                "path": "Publication:podkosova_2022_bimflexi-vr",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2022/podkosova_2022_bimflexi-vr/podkosova_2022_bimflexi-vr-.JPG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2022/podkosova_2022_bimflexi-vr/podkosova_2022_bimflexi-vr-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "BimFlexi"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2022/podkosova_2022_bimflexi-vr/",
        "__class": "Publication"
    },
    {
        "id": "Alharbi_2021",
        "type_id": "journalpaper_notalk",
        "tu_id": 300116,
        "repositum_id": "20.500.12708/138522",
        "title": "Nanotilus: Generator of Immersive Guided-Tours in Crowded 3D Environments",
        "date": "2021-12-09",
        "abstract": "Immersive virtual reality environments are gaining popularity for studying and exploring crowded three-dimensional structures. When reaching very high structural densities, the natural depiction of the scene produces impenetrable clutter and requires visibility and occlusion management strategies for exploration and orientation. Strategies developed to address the crowdedness in desktop applications, however, inhibit the feeling of immersion. They result in nonimmersive, desktop-style outside-in viewing in virtual reality. This paper proposes Nanotilus---a new visibility and guidance approach for very dense environments that generates an endoscopic inside-out experience instead of outside-in viewing, preserving the immersive aspect of virtual reality. The approach consists of two novel, tightly coupled mechanisms that control scene sparsification simultaneously with camera path planning. The sparsification strategy is localized around the camera and is realized as a multiscale, multishell, variety-preserving technique. When Nanotilus dives into the structures to capture internal details residing on multiple scales, it guides the camera using depth-based path planning. In addition to sparsification and path planning, we complete the tour generation with an animation controller, textual annotation, and text-to-visualization conversion. We demonstrate the generated guided tours on mesoscopic biological models -- SARS-CoV-2 and HIV viruses. We evaluate the Nanotilus experience with a baseline outside-in sparsification and navigational technique in a formal user study with 29 participants. While users can maintain a better overview using the outside-in sparsification, the study confirms our hypothesis that Nanotilus leads to stronger engagement and immersion.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "Image",
            "main_file": true,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 540,
            "image_height": 273,
            "name": "Alharbi_2021-Image.JPG",
            "type": "image/jpeg",
            "size": 46269,
            "path": "Publication:Alharbi_2021",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Image.JPG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1791,
            1789,
            1577,
            1110,
            1383,
            1891,
            1285,
            166,
            171
        ],
        "doi": "10.1109/TVCG.2021.3133592",
        "first_published": "2021-12-09",
        "journal": "IEEE Transactions on Visualization and Computer Graphics",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "16",
        "research_areas": [
            "BioVis",
            "IllVis",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "Image",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 540,
                "image_height": 273,
                "name": "Alharbi_2021-Image.JPG",
                "type": "image/jpeg",
                "size": 46269,
                "path": "Publication:Alharbi_2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Image.JPG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Paper",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "name": "Alharbi_2021-Paper.pdf",
                "type": "application/pdf",
                "size": 14837965,
                "path": "Publication:Alharbi_2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/Alharbi_2021-Paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Alharbi_2021/",
        "__class": "Publication"
    },
    {
        "id": "Bayer_Alexander_2021_FTiVR",
        "type_id": "masterthesis",
        "tu_id": null,
        "repositum_id": "20.500.12708/18848",
        "title": "Foot Tracking in Virtual Reality",
        "date": "2021-10-14",
        "abstract": "The visualisation of limbs in Virtual Reality (VR) helps to get a better immersion in the virtual world and it creates better confidence in movement. Sadly a lot of VR applications omit the visualisation of limbs. One reason lies in technical difficulties with bigger scale VR environments and multi-user VR environments where you can not rely on outside-in tracking methods because of the size and possible occlusion that hinders accurate tracking data. Another reason is that developers do not want to exclude parts of their already small user base by demanding special hardware for foot tracking that costs as much as the hand controllers but is only usable in a small number of applications.\nThis thesis tackles these problems by generating a lightweight tracking system that only relies on the correct tracking of the head position so that either inside-out or outside-in tracking can be used with it. To achieve this, a RGB depth camera is mounted on the VR headset. A combination of fiducial marker tracking, depth tracking and inertial measurement units (IMUs) are used to track the user’s feet. These individual tracking signals are then fused to one signal that combines the advantages of the single tracking systems. This tracking information can then be used to animate the feet of a virtual avatar with an Inverse Kinematics (IK) algorithm.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1219
        ],
        "co_supervisor": [
            1731
        ],
        "date_end": "2021",
        "date_start": "2019",
        "diploma_examina": "2021",
        "doi": "10.34726/hss.2021.77646",
        "matrikelnr": "00726255",
        "open_access": "yes",
        "pages": "68",
        "supervisor": [
            378
        ],
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Virtual Reality",
            "Foot Tracking",
            "Motion Capture"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "poster",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Bayer_Alexander_2021_FTiVR-poster.pdf",
                "type": "application/pdf",
                "size": 3127113,
                "path": "Publication:Bayer_Alexander_2021_FTiVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Bayer_Alexander_2021_FTiVR/Bayer_Alexander_2021_FTiVR-poster.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Bayer_Alexander_2021_FTiVR/Bayer_Alexander_2021_FTiVR-poster:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Bayer_Alexander_2021_FTiVR-thesis.pdf",
                "type": "application/pdf",
                "size": 12813943,
                "path": "Publication:Bayer_Alexander_2021_FTiVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Bayer_Alexander_2021_FTiVR/Bayer_Alexander_2021_FTiVR-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Bayer_Alexander_2021_FTiVR/Bayer_Alexander_2021_FTiVR-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Bayer_Alexander_2021_FTiVR/",
        "__class": "Publication"
    },
    {
        "id": "sorger-2021-egonet",
        "type_id": "journalpaper",
        "tu_id": 300416,
        "repositum_id": "20.500.12708/58630",
        "title": "Egocentric Network Exploration for Immersive Analytics",
        "date": "2021-10",
        "abstract": "To exploit the potential of immersive network analytics for engaging and effective exploration, we promote the metaphor of ``egocentrism'', where data depiction and interaction are adapted to the perspective of the user within a 3D network. Egocentrism has the potential to overcome some of the inherent downsides of virtual environments, e.g., visual clutter and cyber-sickness. To investigate the effect of this metaphor on immersive network exploration, we designed and evaluated interfaces of varying degrees of egocentrism. In a user study, we evaluated the effect of these interfaces on visual search tasks, efficiency of network traversal, spatial orientation, as well as cyber-sickness. Results show that a simple egocentric interface considerably improves visual search efficiency and navigation performance, yet does not decrease spatial orientation or increase cyber-sickness. A distorted occlusion-free view of the neighborhood only marginally improves the user's performance. We tie our findings together in an open online tool for egocentric network exploration, providing actionable insights on the benefits of the egocentric network exploration metaphor.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 2921,
            "image_height": 735,
            "name": "sorger-2021-egonet-teaser.png",
            "type": "image/png",
            "size": 2079618,
            "path": "Publication:sorger-2021-egonet",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1072,
            1705,
            1720,
            853,
            1110
        ],
        "cfp": {
            "name": "Call for Papers _ Pacific Graphics 2021.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "246798",
            "orig_name": "Call for Papers _ Pacific Graphics 2021.pdf",
            "ext": "pdf"
        },
        "date_from": "2021-10-18",
        "date_to": "2021-10-21",
        "doi": "10.1111/cgf.14417",
        "event": "Pacific Graphics 21",
        "journal": "Computer Graphics Forum",
        "lecturer": [
            1072
        ],
        "location": "Wellington, NZ",
        "open_access": "no",
        "pages": "12",
        "pages_from": "241",
        "pages_to": "252",
        "publisher": "John Wiley and Sons",
        "volume": "40",
        "research_areas": [
            "InfoVis",
            "NetVis",
            "VR"
        ],
        "keywords": [
            "Computer Graphics and Computer-Aided Design"
        ],
        "weblinks": [
            {
                "href": "https://vis.csh.ac.at/egocentricvr/",
                "caption": "online egocentric network",
                "description": "Online tool for egocentric network exploration leveraging insights gathered at the user study",
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 2921,
                "image_height": 735,
                "name": "sorger-2021-egonet-teaser.png",
                "type": "image/png",
                "size": 2079618,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "the paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "sorger-2021-egonet-the paper.pdf",
                "type": "application/pdf",
                "size": 4211458,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-the paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-the paper:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "video",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "sorger-2021-egonet-video.mp4",
                "type": "video/mp4",
                "size": 295211748,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video.mp4",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video:thumb{{size}}.png",
                "video_mp4": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video:video.mp4"
            }
        ],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/",
        "__class": "Publication"
    },
    {
        "id": "Mortezapoor2021ssc",
        "type_id": "inproceedings",
        "tu_id": 300514,
        "repositum_id": "20.500.12708/58640",
        "title": "Safety and Security Challenges for Collaborative Robotics in VR",
        "date": "2021-08-06",
        "abstract": "Virtual reality (VR) security and privacy are not limited to existing software solutions and applications. In this article, we present to the community the challenges of VR systems with robot integration. Integrating robots under ROS poses a massive risk in terms of data security. At the same time, using a robot for simulations in VR requires, first and foremost, the user's safety - hence redundant data collection and sharing. We want to draw the community's attention to these problems through our example in order to ensure that such systems are thoroughly developed in all directions and well prepared for further deployment to the consumer market. ",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 670,
            "image_height": 699,
            "name": "Mortezapoor2021ssc-image.png",
            "type": "image/png",
            "size": 653220,
            "path": "Publication:Mortezapoor2021ssc",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-image:thumb{{size}}.png"
        },
        "sync_repositum_override": "date",
        "repositum_presentation_id": null,
        "authors": [
            1881,
            1712
        ],
        "booktitle": "Proceedings of the 1st International Workshop on Security for XR and XR for Security (VR4Sec) at Symposium On Usable Privacy and Security (SOUPS) 2021",
        "cfp": {
            "name": "home_call.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "367194",
            "orig_name": "home_call.pdf",
            "ext": "pdf"
        },
        "date_from": "2021-08-06",
        "event": "The 1st International Workshop on Security for XR and XR for Security (VR4Sec) at Symposium On Usable Privacy and Security (SOUPS) 2021",
        "lecturer": [
            1881
        ],
        "open_access": "yes",
        "pages": "4",
        "pages_from": "1",
        "pages_to": "4",
        "publisher": "USENIX Conference Proceedings",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "robotics",
            "security",
            "Virtual Reality",
            "ROS"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 670,
                "image_height": 699,
                "name": "Mortezapoor2021ssc-image.png",
                "type": "image/png",
                "size": 653220,
                "path": "Publication:Mortezapoor2021ssc",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "paper",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Mortezapoor2021ssc-paper.pdf",
                "type": "application/pdf",
                "size": 1932989,
                "path": "Publication:Mortezapoor2021ssc",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/Mortezapoor2021ssc-paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "d4314"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mortezapoor2021ssc/",
        "__class": "Publication"
    },
    {
        "id": "Sprung_2021",
        "type_id": "masterthesis",
        "tu_id": 301661,
        "repositum_id": "20.500.12708/17530",
        "title": "Mobile Collaborating Robots for Direct Haptics in Mixed Reality",
        "date": "2021-04-12",
        "abstract": "After technological advancements in computer graphics and miniaturization of electric circuits, virtual reality has finally found its way into the consumer market. Commercial VR systems like HTC ’s Vive allow their wearers to experience virtual worlds realistically enough to feel audio-visually immersed. However, when interacting with the simulated environment, the limitations of such a system become apparent quickly. They offer no haptic capabilities or feedback beyond what is integrated in their hand-held input devices. Additional body-worn equipment, like haptic suits or exoskeletons, deliver only rudimentary haptic experiences or encumber the user’s ease of movement with excessive weight. Haptic hardware of the ‘encounter’ type are often constrained to a specific location within the simulation area or deliver only soft touching sensations because of their highly mobile but fragile architecture. Therefore, this thesis covers the topic of creating a VR system with haptic feedback and describes its design and implementation in a room sized setup. The paper shows how a mobile manipulator, like the RB-Kairos, can be combined with a virtual reality headset, like the Vive, to deliver real world props into the hands of users to enhance their virtual experience. To track the manipulator’s position with the same accuracy of the VR headset, the Vive’s Lighthouse tracking solution is integrated into the robot. On the software side, the system takes advantage of the Robot Operating System (ROS), which is already configured to control the robot’s basic functionality and is extended to include new modules handling the deliverance of haptic sensations. The simulation of the visual part of this project is handled by the gaming engine Unity, which features a variety of plugins suitable to create basic VR applications with minimal effort. \nThe communication between VR application, RB-Kairos and user is handled wirelessly via radio signals which allows unrestricted mobility for participants and robots within the simulation area. The subsequent technical evaluation offers insights to operating parameters and lists potential enhancement and upgrade possibilities.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "Image",
            "main_file": true,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 259,
            "image_height": 253,
            "name": "Sprung_2021-Image.JPG",
            "type": "image/jpeg",
            "size": 19711,
            "path": "Publication:Sprung_2021",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Image.JPG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1866
        ],
        "date_end": "2011-02-10",
        "date_start": "2012-04-12",
        "diploma_examina": "2012-04-12",
        "doi": "10.34726/hss.2021.62304",
        "matrikelnr": "00725956",
        "open_access": "yes",
        "pages": "111",
        "supervisor": [
            378
        ],
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "virtual reality",
            "haptics",
            "collaborate",
            "robot",
            "rb-kairos",
            "ur-10",
            "vive",
            "robot operating system",
            "ros",
            "steamvr"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "Image",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 259,
                "image_height": 253,
                "name": "Sprung_2021-Image.JPG",
                "type": "image/jpeg",
                "size": 19711,
                "path": "Publication:Sprung_2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Image.JPG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Master Thesis",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "name": "Sprung_2021-Master Thesis.pdf",
                "type": "application/pdf",
                "size": 18428267,
                "path": "Publication:Sprung_2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Master Thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Master Thesis:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Poster",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "name": "Sprung_2021-Poster.pdf",
                "type": "application/pdf",
                "size": 2538198,
                "path": "Publication:Sprung_2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Poster.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/Sprung_2021-Poster:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Sprung_2021/",
        "__class": "Publication"
    },
    {
        "id": "pirch_2021_VRN",
        "type_id": "journalpaper_notalk",
        "tu_id": 299243,
        "repositum_id": "20.500.12708/138336",
        "title": "The VRNetzer platform enables interactive network analysis in Virtual Reality",
        "date": "2021-04",
        "abstract": "Networks provide a powerful representation of interacting components within complex\nsystems, making them ideal for visually and analytically exploring big data. However, the size\nand complexity of many networks render static visualizations on typically-sized paper or\nscreens impractical, resulting in proverbial ‘hairballs’. Here, we introduce a Virtual Reality\n(VR) platform that overcomes these limitations by facilitating the thorough visual, and\ninteractive, exploration of large networks. Our platform allows maximal customization and\nextendibility, through the import of custom code for data analysis, integration of external\ndatabases, and design of arbitrary user interface elements, among other features. As a proof\nof concept, we show how our platform can be used to interactively explore genome-scale\nmolecular networks to identify genes associated with rare diseases and understand how they\nmight contribute to disease development. Our platform represents a general purpose, VRbased\ndata exploration platform for large and diverse data types by providing an interface\nthat facilitates the interaction between human intuition and state-of-the-art analysis\nmethods.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1835,
            1836,
            1837,
            1838,
            1839,
            1840,
            1841,
            1842,
            1945,
            378,
            1843
        ],
        "doi": "10.1038/s41467-021-22570-w",
        "first_published": "2021-04",
        "journal": "Nature Communications",
        "number": "2432",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "14",
        "volume": "12",
        "research_areas": [
            "InfoVis",
            "VR"
        ],
        "keywords": [
            "virtual reality"
        ],
        "weblinks": [
            {
                "href": "https://www.nature.com/articles/s41467-021-22570-w",
                "caption": "Publication",
                "description": "The text of the publication (open access).",
                "main_file": 0
            }
        ],
        "files": [],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/pirch_2021_VRN/",
        "__class": "Publication"
    },
    {
        "id": "reimer-2021-CVR",
        "type_id": "journalpaper_notalk",
        "tu_id": 299074,
        "repositum_id": "20.500.12708/138272",
        "title": "Colocation for SLAM-Tracked VR Headsets with Hand Tracking",
        "date": "2021-04",
        "abstract": "In colocated multi-user Virtual Reality applications, relative user positions in the virtual environment need to match their relative positions in the physical tracking space. A mismatch between virtual and real relative user positions might lead to harmful events such as physical user collisions. This paper examines three calibration methods that enable colocated Virtual Reality scenarios for SLAM-tracked head-mounted displays without the need for an external tracking system. Two of these methods—fixed-point calibration and marked-based calibration—have been described in previous research; the third method that uses hand tracking capabilities of head-mounted displays is novel. We evaluated the accuracy of these three methods in an experimental procedure with two colocated Oculus Quest devices. The results of the evaluation show that our novel hand tracking-based calibration method provides better accuracy and consistency while at the same time being easy to execute. The paper further discusses the potential of all evaluated calibration methods. ",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1944,
            "image_height": 972,
            "preview_image_width": 1944,
            "preview_image_height": 972,
            "name": "reimer-2021-CVR-image.jpg",
            "type": "image/jpeg",
            "size": 301602,
            "path": "Publication:reimer-2021-CVR",
            "preview_name": "reimer-2021-CVR-image:preview.jpg",
            "preview_type": "image/jpeg",
            "preview_size": 301602,
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/reimer-2021-CVR/reimer-2021-CVR-image.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/reimer-2021-CVR/reimer-2021-CVR-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1832,
            1945,
            1833,
            378
        ],
        "doi": "10.3390/computers10050058",
        "first_published": "2021-04",
        "issn": "2073-431X",
        "journal": "Computers",
        "number": "5",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "17",
        "volume": "10",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "colocation",
            "multi-user VR",
            "hand tracking",
            "shared space"
        ],
        "weblinks": [
            {
                "href": "https://www.mdpi.com/2073-431X/10/5/58",
                "caption": null,
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1944,
                "image_height": 972,
                "preview_image_width": 1944,
                "preview_image_height": 972,
                "name": "reimer-2021-CVR-image.jpg",
                "type": "image/jpeg",
                "size": 301602,
                "path": "Publication:reimer-2021-CVR",
                "preview_name": "reimer-2021-CVR-image:preview.jpg",
                "preview_type": "image/jpeg",
                "preview_size": 301602,
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/reimer-2021-CVR/reimer-2021-CVR-image.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/reimer-2021-CVR/reimer-2021-CVR-image:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/reimer-2021-CVR/",
        "__class": "Publication"
    },
    {
        "id": "Vonach021",
        "type_id": "journalpaper_notalk",
        "tu_id": 298566,
        "repositum_id": "20.500.12708/138122",
        "title": "StARboard & TrACTOr: Actuated Tangibles in an Educational TAR Application",
        "date": "2021-02-09",
        "abstract": "We explore the potential of direct haptic interaction in a novel approach to Tangible Augmented Reality in an educational context. Employing our prototyping platform ACTO, we developed a tabletop Augmented Reality application StARboard for sailing students. In this personal viewpoint environment virtual objects, e.g., sailing ships, are physically represented by actuated micro robots. These align with virtual objects, allowing direct physical interaction with the scene. When a user tries to pick up a virtual ship, its physical robot counterpart is grabbed instead. We also developed a tracking solution TrACTOr, employing a depth sensor to allow tracking independent of the table surface. In this paper we present concept and development of StARboard and TrACTOr. We report results of our user study with 18 participants using our prototype. They show that direct haptic interaction in tabletop AR scores en-par with traditional mouse interaction on a desktop setup in usability (mean SUS = 86.7 vs. 82.9) and performance (mean RTLX = 15.0 vs. 14.8), while outperforming the mouse in factors related to learning like presence (mean 6.0 vs 3.1) and absorption (mean 5.4 vs. 4.2). It was also rated the most fun (13× vs. 0×) and most suitable for learning (9× vs. 4×).",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "ACTO with overlay",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 785,
            "image_height": 541,
            "name": "Vonach021-ACTO with overlay.jpg",
            "type": "image/jpeg",
            "size": 259725,
            "path": "Publication:Vonach021",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Vonach021/Vonach021-ACTO with overlay.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Vonach021/Vonach021-ACTO with overlay:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1944,
            1871,
            378
        ],
        "doi": "10.3390/mti5020006",
        "issn": "2414-4088",
        "journal": "Multimodal Technologies and Interaction",
        "number": "2",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "22",
        "volume": "5",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Tangible Augmented Reality; Tangible User Interface; tabletop interaction; robots; actuation; haptics"
        ],
        "weblinks": [
            {
                "href": "https://www.mdpi.com/2414-4088/5/2/6",
                "caption": null,
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "ACTO with overlay",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 785,
                "image_height": 541,
                "name": "Vonach021-ACTO with overlay.jpg",
                "type": "image/jpeg",
                "size": 259725,
                "path": "Publication:Vonach021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Vonach021/Vonach021-ACTO with overlay.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Vonach021/Vonach021-ACTO with overlay:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Vonach021/",
        "__class": "Publication"
    },
    {
        "id": "perl_oskar_2021_DVR",
        "type_id": "bachelorthesis",
        "tu_id": null,
        "repositum_id": null,
        "title": "Distributed Multi-User VR With Full-Body Avatars",
        "date": "2021-01-10",
        "abstract": "Social Virtual Reality applications have the potential to provide a unique way to convey a sense of social presence when compared with other ways of communication in the area of computer mediated communication. Having a virtual body in a social Virtual Reality application can not only heighten the sense of embodiment of the user but can also convey a sense of presence to other users. General interest in social virtual reality applications is rising partly due to virtual reality devices become more affordable, including input technologies like hand tracking. This thesis aims to create a multi-user Virtual Reality application that heightens social presence by representing users with a full-body avatar utilizing current entry level consumer grade virtual reality hardware. Hand tracking in combination with inverse kinematics is used to enhance perceived social presence. Those technologies provide a sufficiently convincing representation of the performed action of the user in a multi user context, while being significantly less cumbersome as solutions using additional trackers or controllers to realize a full body avatar.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1319,
            "image_height": 590,
            "name": "perl_oskar_2021_DVR-image.PNG",
            "type": "image/png",
            "size": 859897,
            "path": "Publication:perl_oskar_2021_DVR",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-image.PNG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1831
        ],
        "date_end": "2021",
        "date_start": "2020",
        "matrikelnr": "01625757",
        "supervisor": [
            1945
        ],
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "virtual reality; multi-user; hand tracking; Inverse Kinematics"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1319,
                "image_height": 590,
                "name": "perl_oskar_2021_DVR-image.PNG",
                "type": "image/png",
                "size": 859897,
                "path": "Publication:perl_oskar_2021_DVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-image.PNG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "perl_oskar_2021_DVR-thesis.pdf",
                "type": "application/pdf",
                "size": 5144298,
                "path": "Publication:perl_oskar_2021_DVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/perl_oskar_2021_DVR-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/perl_oskar_2021_DVR/",
        "__class": "Publication"
    },
    {
        "id": "Kan_Peter-2021-MDPI",
        "type_id": "journalpaper_notalk",
        "tu_id": 298527,
        "repositum_id": "20.500.12708/138110",
        "title": "Automatic Interior Design in Augmented Reality Based on Hierarchical Tree of Procedural Rules",
        "date": "2021",
        "abstract": "Augmented reality has a high potential in interior design due to its capability of visualizing numerous prospective designs directly in a target room. In this paper, we present our research on utilization of augmented reality for interactive and personalized furnishing. We propose a new algorithm for automated interior design which generates sensible and personalized furniture configurations. This algorithm is combined with mobile augmented reality system to provide a user with an interactive interior design try-out tool. Personalized design is achieved via a recommender system which uses user preferences and room data as input. We conducted three user studies to explore different aspects of our research. The first study investigated the user preference between augmented reality and on-screen visualization for interactive interior design. In the second user study, we studied the user preference between our algorithm for automated interior design and optimization-based algorithm. Finally, the third study evaluated the probability of sensible design generation by the compared algorithms. The main outcome of our research suggests that augmented reality is viable technology for interactive home furnishing.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "ARDesign",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1001,
            "image_height": 678,
            "name": "Kan_Peter-2021-MDPI-ARDesign.jpg",
            "type": "image/jpeg",
            "size": 373887,
            "path": "Publication:Kan_Peter-2021-MDPI",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1720,
            1872,
            923,
            1873
        ],
        "doi": "10.3390/electronics10030245",
        "journal": "Electronics",
        "number": "3",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "17",
        "volume": "10",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "interior design",
            "augmented reality",
            "3D content generation",
            "user study",
            "personalized recommender"
        ],
        "weblinks": [
            {
                "href": "https://www.mdpi.com/2079-9292/10/3/245",
                "caption": "Paper link",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "ARDesign",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1001,
                "image_height": 678,
                "name": "Kan_Peter-2021-MDPI-ARDesign.jpg",
                "type": "image/jpeg",
                "size": 373887,
                "path": "Publication:Kan_Peter-2021-MDPI",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/",
        "__class": "Publication"
    },
    {
        "id": "luidolt-2020-lightperceptionVR",
        "type_id": "journalpaper",
        "tu_id": 291224,
        "repositum_id": "20.500.12708/140951",
        "title": "Gaze-Dependent Simulation of Light Perception in Virtual Reality",
        "date": "2020-12",
        "abstract": "The perception of light is inherently different inside a virtual reality (VR) or augmented reality (AR) simulation when compared to the real world. Conventional head-worn displays (HWDs) are not able to display the same high dynamic range of brightness and color as the human eye can perceive in the real world. To mimic the perception of real-world scenes in virtual scenes, it is crucial to reproduce the effects of incident light on the human visual system. In order to advance virtual simulations towards perceptual realism, we present an eye-tracked VR/AR simulation comprising effects for gaze-dependent temporal eye adaption, perceptual glare, visual acuity reduction, and scotopic color vision. Our simulation is based on medical expert knowledge and medical studies of the healthy human eye. We conducted the first user study comparing the perception of light in a real-world low-light scene to a VR simulation. Our results show that the proposed combination of simulated visual effects is well received by users and also indicate that an individual adaptation is necessary, because perception of light is highly subjective.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 1478,
            "image_height": 534,
            "name": "luidolt-2020-lightperceptionVR-image.jpg",
            "type": "image/jpeg",
            "size": 1390298,
            "path": "Publication:luidolt-2020-lightperceptionVR",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-image.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1577,
            193,
            1030
        ],
        "cfp": {
            "name": "Screenshot_2020-10-30 Call for Papers – ISMAR 2020 – International Symposium on Mixed and Augmented Reality.png",
            "type": "image/png",
            "error": "0",
            "size": "219516",
            "orig_name": "Screenshot_2020-10-30 Call for Papers – ISMAR 2020 – International Symposium on Mixed and Augmented Reality.png",
            "ext": "png"
        },
        "date_from": "2020-11-09",
        "date_to": "2020-11-13",
        "doi": "10.1109/TVCG.2020.3023604",
        "event": "ISMAR 2020",
        "first_published": "2020-09-17",
        "issn": "1077-2626",
        "journal": "IEEE Transactions on Visualization and Computer Graphics",
        "lecturer": [
            1577
        ],
        "location": "online",
        "pages_from": "3557",
        "pages_to": "3567",
        "volume": "Volume 26, Issue 12",
        "research_areas": [
            "Perception",
            "Rendering",
            "VR"
        ],
        "keywords": [
            "perception",
            "virtual reality",
            "user studies"
        ],
        "weblinks": [
            {
                "href": "https://youtu.be/cY6z2pD7dWc",
                "caption": "Conference Talk",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "additional-material",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "luidolt-2020-lightperceptionVR-additional-material.pdf",
                "type": "application/pdf",
                "size": 37540896,
                "path": "Publication:luidolt-2020-lightperceptionVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-additional-material.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-additional-material:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 1478,
                "image_height": 534,
                "name": "luidolt-2020-lightperceptionVR-image.jpg",
                "type": "image/jpeg",
                "size": 1390298,
                "path": "Publication:luidolt-2020-lightperceptionVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-image.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "luidolt-2020-lightperceptionVR-paper.pdf",
                "type": "application/pdf",
                "size": 31511229,
                "path": "Publication:luidolt-2020-lightperceptionVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-paper:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "slides",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "luidolt-2020-lightperceptionVR-slides.pdf",
                "type": "application/pdf",
                "size": 2661389,
                "path": "Publication:luidolt-2020-lightperceptionVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-slides.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-slides:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "video",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "luidolt-2020-lightperceptionVR-video.mp4",
                "type": "video/mp4",
                "size": 30909955,
                "path": "Publication:luidolt-2020-lightperceptionVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-video.mp4",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-video:thumb{{size}}.png",
                "video_mp4": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/luidolt-2020-lightperceptionVR-video:video.mp4"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/luidolt-2020-lightperceptionVR/",
        "__class": "Publication"
    },
    {
        "id": "KROESL-2020-SVI",
        "type_id": "phdthesis",
        "tu_id": null,
        "repositum_id": "20.500.12708/16475",
        "title": "Simulating Vision Impairments in Virtual and Augmented Reality",
        "date": "2020-11-30",
        "abstract": "There are at least 2.2 billion people affected by vision impairments worldwide, and the number of people suffering from common eye diseases like cataracts, diabetic retinopathy, glaucoma or macular degeneration, which show a higher prevalence with age, is expected to rise in the years to come, due to factors like aging of the population.\n\nMedical publications, ophthalmologists and patients can give some insight into the effects of vision impairments, but for people with normal eyesight (even medical personnel) it is often hard to grasp how certain eye diseases can affect perception. We need to understand and quantify the effects of vision impairments on perception, to design cities, buildings, or lighting systems that are accessible for people with vision impairments. Conducting studies on vision impairments in the real world is challenging, because it requires a large number of participants with exactly the same type of impairment. Such a sample group is often hard or even impossible to find, since not every symptom can be assessed precisely and the same eye disease can be experienced very differently between affected people.\n\nIn this thesis, we address these issues by presenting a system and a methodology to simulate vision impairments, such as refractive errors, cataracts, cornea disease, and age-related macular degeneration in virtual reality (VR) and augmented reality (AR), which allows us to conduct user studies in VR or AR with people with healthy eyesight and graphically simulated vision impairments. \nWe present a calibration technique that allows us to calibrate individual simulated symptoms to the same level of severity for every user, taking hardware constraints as well as vision capabilities of users into account.\n\nWe measured the influence of simulated reduced visual acuity on maximum recognition distances of signage in a VR study and showed that current international standards and norms do not sufficiently consider people with vision impairments. In a second study, featuring our medically based cataract simulations in VR, we found that different lighting systems can positively or negatively affect the perception of people with cataracts. We improved and extended our cataract simulation to video–see-through AR and evaluated and adjusted each simulated symptom together with cataract patients in a pilot study, showing the flexibility and potential of our approach. In future work we plan to include further vision impairments and open source our software, so it can be used for architects and lighting designers to test their designs for accessibility, for training of medical personnel, and to increase empathy for people with vision impairments. This way, we hope to contribute to making this world more inclusive for everyone.\n",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 539,
            "image_height": 270,
            "name": "KROESL-2020-SVI-image.jpg",
            "type": "image/jpeg",
            "size": 32618,
            "path": "Publication:KROESL-2020-SVI",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-image.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1030
        ],
        "co_supervisor": [
            1559
        ],
        "date_end": "2020-10",
        "date_start": "2016-04",
        "duration": "4.5 years",
        "open_access": "yes",
        "reviewer_1": [
            1299
        ],
        "reviewer_2": [
            1299
        ],
        "rigorosum": "2020-11-30",
        "supervisor": [
            193
        ],
        "research_areas": [
            "Rendering",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 539,
                "image_height": 270,
                "name": "KROESL-2020-SVI-image.jpg",
                "type": "image/jpeg",
                "size": 32618,
                "path": "Publication:KROESL-2020-SVI",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-image.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "KROESL-2020-SVI-thesis.pdf",
                "type": "application/pdf",
                "size": 7596645,
                "path": "Publication:KROESL-2020-SVI",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/KROESL-2020-SVI-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/KROESL-2020-SVI/",
        "__class": "Publication"
    },
    {
        "id": "Kroesl_2020_11_09",
        "type_id": "inproceedings",
        "tu_id": null,
        "repositum_id": "20.500.12708/55554",
        "title": "CatARact: Simulating Cataracts in Augmented Reality",
        "date": "2020-11-09",
        "abstract": "For our society to be more inclusive and accessible, the more than 2.2 billion people worldwide with limited vision should be considered more frequently in design decisions, such as architectural planning. To help architects in evaluating their designs and give medical per-sonnel some insight on how patients experience cataracts, we worked with ophthalmologists to develop the first medically-informed, pilot-studied simulation of cataracts in eye-tracked augmented reality (AR). To test our methodology and simulation, we conducted a pilot study with cataract patients between surgeries of their two cataract-affected eyes. Participants compared the vision of their corrected eye, viewing through simulated cataracts, to that of their still affected eye, viewing an unmodified AR view. In addition, we conducted remote experiments via video call, live adjusting our simulation and comparing it to related work, with participants who had cataract surgery a few months before. We present our findings and insights from these experiments and outline avenues for future work.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 1024,
            "image_height": 512,
            "name": "Kroesl_2020_11_09-image.png",
            "type": "image/png",
            "size": 483244,
            "path": "Publication:Kroesl_2020_11_09",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1030,
            1633,
            1577,
            1636,
            1635,
            1634,
            193
        ],
        "booktitle": "IEEE International Symposium on Mixed and Augmented Reality (ISMAR).",
        "cfp": {
            "name": "Call for Papers – ISMAR 2020 – International Symposium on Mixed and Augmented Reality.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "1595938",
            "orig_name": "Call for Papers – ISMAR 2020 – International Symposium on Mixed and Augmented Reality.pdf",
            "ext": "pdf"
        },
        "date_from": "2020-11-09",
        "date_to": "2020-11-13",
        "event": "IEEE International Symposium on Mixed and Augmented Reality (ISMAR).",
        "lecturer": [
            1030
        ],
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "10",
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 1024,
                "image_height": 512,
                "name": "Kroesl_2020_11_09-image.png",
                "type": "image/png",
                "size": 483244,
                "path": "Publication:Kroesl_2020_11_09",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "Kroesl_2020_11_09-Paper.pdf",
                "type": "application/pdf",
                "size": 1809637,
                "path": "Publication:Kroesl_2020_11_09",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-Paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/Kroesl_2020_11_09-Paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kroesl_2020_11_09/",
        "__class": "Publication"
    },
    {
        "id": "Sebernegg2020",
        "type_id": "techreport",
        "tu_id": null,
        "repositum_id": "20.500.12708/40238",
        "title": "Motion Similarity Modeling - A State of the Art Report",
        "date": "2020-08",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "MoSiMo",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1278,
            "image_height": 887,
            "name": "Sebernegg2020-MoSiMo.jpg",
            "type": "image/jpeg",
            "size": 148448,
            "path": "Publication:Sebernegg2020",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1804,
            1720,
            378
        ],
        "number": "TR-193-02-2020-5",
        "open_access": "yes",
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [
            {
                "href": "https://arxiv.org/abs/2008.05872",
                "caption": "arXiv",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "MoSiMo",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1278,
                "image_height": 887,
                "name": "Sebernegg2020-MoSiMo.jpg",
                "type": "image/jpeg",
                "size": 148448,
                "path": "Publication:Sebernegg2020",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/",
        "__class": "Publication"
    },
    {
        "id": "Mirzaei_Mohammadreza_2020-EVR",
        "type_id": "journalpaper",
        "tu_id": 291029,
        "repositum_id": "20.500.12708/140902",
        "title": "EarVR: Using Ear Haptics in Virtual Reality for Deaf and Hard-of-Hearing People",
        "date": "2020-05",
        "abstract": "Virtual Reality (VR) has a great potential to improve skills of Deaf and Hard-of-Hearing (DHH) people. Most VR applications and devices are designed for persons without hearing problems. Therefore, DHH persons have many limitations when using VR. Adding special features in a VR environment, such as subtitles, or haptic devices will help them. Previously, it was necessary to design a special VR environment for DHH persons. We introduce and evaluate a new prototype called \"EarVR\" that can be mounted on any desktop or mobile VR Head-Mounted Display (HMD). EarVR analyzes 3D sounds in a VR environment and locates the direction of the sound source that is closest to a user. It notifies the user about the sound direction using two vibro-motors placed on the user's ears. EarVR helps DHH persons to complete sound-based VR tasks in any VR application with 3D audio and a mute option for background music. Therefore, DHH persons can use all VR applications with 3D audio, not only those applications designed for them. Our user study shows that DHH participants were able to complete a simple VR task significantly faster with EarVR than without. The completion time of DHH participants was very close to participants without hearing problems. Also, it shows that DHH participants were able to finish a complex VR task with EarVR, while without it, they could not finish the task even once. Finally, our qualitative and quantitative evaluation among DHH participants indicates that they preferred to use EarVR and it encouraged them to use VR technology more.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "EarVR",
            "filetitle": "EarVR",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1110,
            "image_height": 602,
            "name": "Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
            "type": "image/jpeg",
            "size": 499544,
            "path": "Publication:Mirzaei_Mohammadreza_2020-EVR",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1803,
            1720,
            378
        ],
        "cfp": {
            "name": "cfp2020.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "516819",
            "orig_name": "cfp2020.pdf",
            "ext": "pdf"
        },
        "date_from": "2020-03-22",
        "date_to": "2020-03-26",
        "doi": "10.1109/TVCG.2020.2973441",
        "event": "IEEE  VR 2021",
        "journal": "IEEE Transactions on Visualization and Computer Graphics",
        "lecturer": [
            378
        ],
        "number": "05",
        "open_access": "no",
        "pages_from": "2084",
        "pages_to": "2093",
        "volume": "26",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Handicapped Aids",
            "Haptic Interfaces",
            "Helmet Mounted Displays",
            "Virtual Reality",
            "3 D Sounds",
            "3 D Audio",
            "Deaf And Hard Of Hearing People",
            "Head Mounted Display",
            "VR Application",
            "Ear VR",
            "VR Technology",
            "Haptic Devices",
            "DHH Persons",
            "Hearing Problems",
            "VR Apps."
        ],
        "weblinks": [
            {
                "href": "https://www.computer.org/csdl/journal/tg/2020/05/08998298/1hrXce2Kmhq",
                "caption": "TVCG",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": "EarVR",
                "filetitle": "EarVR",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1110,
                "image_height": 602,
                "name": "Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
                "type": "image/jpeg",
                "size": 499544,
                "path": "Publication:Mirzaei_Mohammadreza_2020-EVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/",
        "__class": "Publication"
    },
    {
        "id": "Kovacs_2020",
        "type_id": "masterthesis",
        "tu_id": null,
        "repositum_id": null,
        "title": "VR Bridges: An Approach to Simulating Uneven Surfaces in VR",
        "date": "2020-04-30",
        "abstract": "Virtual reality (VR) promises boundless potential for experiences. Yet, due to technical restrictions, current VR experiences are often limited in many ways and incomparable to their real-world counterparts. Walkable smooth uneven surfaces are inherent to reality but lacking in VR. At the same time, VR enables the alteration and manipulation of perception, o˙ering tools for reshaping the experience. In this thesis, we explore the possibility of simulating walkable smooth uneven surfaces in VR via a multi-sensory stimulation approach. We examine human height and slant perception and incorporate our findings into a multi-modal approach by combining visual manipulations, haptic and vibrotactile stimuli.\nOur approach is realized by constructing physical bridge props and creating a complex software application to introduce multi-sensory stimuli to the user. The simulation is evaluated in two user studies, each focusing on one of two di˙erently shaped physical bridge props. In the studies, we evaluate the feasibility of a flat and an upward curved prop for the simulation of di˙erent virtual surface heights. The data collected during the studies is subjected to a qualitative and quantitative analysis.\nOur results suggest that the use of a curved prop enables the convincing simulation of significantly higher uneven surfaces than the actual height of the prop. The haptic feedback of the curved surface and the proprioceptive cues of actual vertical traversal facilitate user provided height and slant estimations to be closer to the values suggested by the visual cues. The use of a flat prop is less realistic and leads to height and slant underestimations, despite the simulated visual height and slant cues. However, a flat surface might be still used to simulate indentations and protrusions with smaller height di˙erences.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "Image",
            "main_file": true,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 348,
            "image_height": 181,
            "name": "Kovacs_2020-Image.JPG",
            "type": "image/jpeg",
            "size": 22461,
            "path": "Publication:Kovacs_2020",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Image.JPG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1487
        ],
        "co_supervisor": [
            1712
        ],
        "date_end": "2020-04-30",
        "date_start": "2019-10-20",
        "diploma_examina": "2020-04-30",
        "matrikelnr": "01227520",
        "open_access": "yes",
        "supervisor": [
            378
        ],
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "Image",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 348,
                "image_height": 181,
                "name": "Kovacs_2020-Image.JPG",
                "type": "image/jpeg",
                "size": 22461,
                "path": "Publication:Kovacs_2020",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Image.JPG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Master Thesis",
                "main_file": true,
                "use_in_gallery": true,
                "access": "public",
                "name": "Kovacs_2020-Master Thesis.pdf",
                "type": "application/pdf",
                "size": 7768413,
                "path": "Publication:Kovacs_2020",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Master Thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/Kovacs_2020-Master Thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Kovacs_2020/",
        "__class": "Publication"
    },
    {
        "id": "kroesl-2020-XREye",
        "type_id": "otherreviewed",
        "tu_id": null,
        "repositum_id": null,
        "title": "XREye: Simulating Visual Impairments in Eye-Tracked XR ",
        "date": "2020-03",
        "abstract": "Many people suffer from visual impairments, which can be difficult for patients to describe and others to visualize. To aid in understanding what people with visual impairments experience, we demonstrate a set of medically informed simulations in eye-tracked XR of several common conditions that affect visual perception: refractive errors (myopia, hyperopia, and presbyopia), cornea disease, and age-related macular degeneration (wet and dry).",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "live demo in mozilla social hubs room",
            "filetitle": "image",
            "main_file": true,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 561,
            "image_height": 414,
            "name": "kroesl-2020-XREye-image.png",
            "type": "image/png",
            "size": 327573,
            "path": "Publication:kroesl-2020-XREye",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1030,
            1633,
            1636,
            1635,
            1634,
            193
        ],
        "booktitle": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)",
        "location": "(Atlanta) online",
        "open_access": "yes",
        "publisher": "IEEE",
        "research_areas": [
            "Perception",
            "Rendering",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": "extended abstract of the research demo",
                "filetitle": "extended abstract",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "kroesl-2020-XREye-extended abstract.pdf",
                "type": "application/pdf",
                "size": 121548,
                "path": "Publication:kroesl-2020-XREye",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-extended abstract.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-extended abstract:thumb{{size}}.png"
            },
            {
                "description": "live demo in mozilla social hubs room",
                "filetitle": "image",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 561,
                "image_height": 414,
                "name": "kroesl-2020-XREye-image.png",
                "type": "image/png",
                "size": 327573,
                "path": "Publication:kroesl-2020-XREye",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "poster",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "kroesl-2020-XREye-poster.pdf",
                "type": "application/pdf",
                "size": 3057039,
                "path": "Publication:kroesl-2020-XREye",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-poster.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-poster:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "video",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "kroesl-2020-XREye-video.mp4",
                "type": "video/mp4",
                "size": 8756217,
                "path": "Publication:kroesl-2020-XREye",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-video.mp4",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-video:thumb{{size}}.png",
                "video_mp4": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/kroesl-2020-XREye-video:video.mp4"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/kroesl-2020-XREye/",
        "__class": "Publication"
    },
    {
        "id": "Vasylevska2020VRB",
        "type_id": "inproceedings",
        "tu_id": 292209,
        "repositum_id": null,
        "title": "VR Bridges: An Approach to Uneven Surfaces Simulation in VR ",
        "date": "2020-03",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1256,
            "image_height": 496,
            "name": "Vasylevska2020VRB-image.png",
            "type": "image/png",
            "size": 896522,
            "path": "Publication:Vasylevska2020VRB",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1712,
            1487,
            378
        ],
        "booktitle": "Proceedings of IEEE Conference on Virtual Reality 2020",
        "cfp": {
            "name": "Call for Papers.html",
            "type": "text/html",
            "error": "0",
            "size": "134732",
            "orig_name": "Call for Papers.html",
            "ext": "html"
        },
        "date_from": "2020-03-22",
        "date_to": "2020-03-26",
        "doi": "10.1109/VR46266.2020.00-45",
        "event": "IEEE Conference on Virtual Reality 2020",
        "lecturer": [
            1712
        ],
        "location": "Atlanta, USA",
        "open_access": "yes",
        "pages_from": "388",
        "pages_to": "397",
        "publisher": "IEEE",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Virtual Reality",
            "height perception",
            "haptics",
            "simulation",
            "vibration",
            "physical props "
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1256,
                "image_height": 496,
                "name": "Vasylevska2020VRB-image.png",
                "type": "image/png",
                "size": 896522,
                "path": "Publication:Vasylevska2020VRB",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-image:thumb{{size}}.png"
            },
            {
                "description": "Author's copy of the publication",
                "filetitle": "paper",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Vasylevska2020VRB-paper.pdf",
                "type": "application/pdf",
                "size": 2854687,
                "path": "Publication:Vasylevska2020VRB",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/Vasylevska2020VRB-paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Vasylevska2020VRB/",
        "__class": "Publication"
    },
    {
        "id": "reimer-2020-CBG",
        "type_id": "inproceedings",
        "tu_id": null,
        "repositum_id": null,
        "title": "The Influence of Full-Body Representation on Translation and Curvature Gain",
        "date": "2020-03",
        "abstract": "Redirected Walking (RDW) techniques allow users to navigate immersive virtual environments much larger than the available tracking space by natural walking. Whereas several approaches exist, numerous RDW techniques operate by applying gains of different types to the user’s viewport. These gains must remain undetected by the user in order for a RDW technique to support plausible navigation within a virtual environment. The present paper explores the relationship between detection thresholds of redirection gains and the presence of a self-avatar within the virtual environment. In four psychophysical experiments we estimated the thresholds of curvature and translation gain with and without a virtual body. The goal was to evaluate whether a full-body representation has an impact on the detection thresholds of these gains. The results indicate that although the presence of a virtual body does not significantly affect the detectability of these gains, it supports users with the illusion of easier detection. We discuss the possibility of a future combination of full-body representations and redirected walking and if these findings influence the implementation of large virtual environments with immersive virtual body representation.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "banner_paper",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1772,
            "image_height": 591,
            "name": "reimer-2020-CBG-banner_paper.jpg",
            "type": "image/jpeg",
            "size": 165848,
            "path": "Publication:reimer-2020-CBG",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/reimer-2020-CBG/reimer-2020-CBG-banner_paper.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/reimer-2020-CBG/reimer-2020-CBG-banner_paper:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1832,
            1880,
            378,
            452
        ],
        "booktitle": "2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)",
        "cfp": {
            "name": "VHCIE@IEEEVR2020 - Submission.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "40118",
            "orig_name": "VHCIE@IEEEVR2020 - Submission.pdf",
            "ext": "pdf"
        },
        "doi": "10.1109/VRW50115.2020.00032",
        "event": "IEEEVR 2020",
        "isbn": "978-1-7281-6532-5",
        "issn": "978-1-7281-6533-2",
        "location": "Atlanta, US",
        "pages": "154-159",
        "pages_from": "154",
        "pages_to": "159",
        "publisher": "IEEE",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "redirected walking",
            "body representation",
            "curvature gain",
            "translation gain"
        ],
        "weblinks": [
            {
                "href": "https://ieeexplore.ieee.org/document/9090671",
                "caption": null,
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "banner_paper",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1772,
                "image_height": 591,
                "name": "reimer-2020-CBG-banner_paper.jpg",
                "type": "image/jpeg",
                "size": 165848,
                "path": "Publication:reimer-2020-CBG",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/reimer-2020-CBG/reimer-2020-CBG-banner_paper.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/reimer-2020-CBG/reimer-2020-CBG-banner_paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/reimer-2020-CBG/",
        "__class": "Publication"
    },
    {
        "id": "Luidolt-2020-DA",
        "type_id": "masterthesis",
        "tu_id": null,
        "repositum_id": "20.500.12708/1203",
        "title": "Perception of Light in Virtual Reality",
        "date": "2020-02",
        "abstract": "The perception of light and light incidence in the human eye is substantially different in real-world scenarios and virtual reality (VR) simulations. Standard low dynamic range displays, as used in common VR headsets, are not able to replicate the same light intensities we see in reality. Therefore, light phenomenons, such as temporal eye adaptation, perceptual glare, visual acuity reduction and scotopic color vision need to be simulated to generate realistic images. Even though, a physically based simulation of these effects could increase the perceived reality of VR applications, this topic has not been thoroughly researched yet. \nWe propose a post-processing workflow for VR and augmented reality (AR), using eye tracking, that is based on medical studies of the healthy human eye and is able to run in real time, to simulate light effects as close to reality as possible. We improve an existing temporal eye adaptation algorithm to be view-dependent. We adapt a medically based glare simulation to run in VR and AR. Additionally, we add eye tracking to adjust the glare intensity according to the viewing direction and the glare appearance depending on the user’s pupil size. We propose a new function fit for the reduction of visual acuity in VR head mounted displays. Finally, we include scotopic color vision for more realistic rendering of low-light scenes. \nWe conducted a primarily qualitative pilot study, comparing a real-world low-light scene to our VR simulation through individual, perceptual evaluation. Most participants mentioned, that the simulation of temporal eye adaptation, visual acuity reduction and scotopic color vision was similar or the same as their own perception in the real world. However, further work is necessary to improve the appearance and movement of our proposed glare kernel. \nWe conclude, that our work has laid a ground base for further research regarding the simulation and individual adaptation to the perception of light in VR.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 1920,
            "image_height": 1080,
            "name": "Luidolt-2020-DA-image.png",
            "type": "image/png",
            "size": 1606001,
            "path": "Publication:Luidolt-2020-DA",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1577
        ],
        "date_end": "2020-02-02",
        "date_start": "2019-04",
        "diploma_examina": "2020-02-10",
        "matrikelnr": "01427250",
        "supervisor": [
            1030,
            193
        ],
        "research_areas": [
            "Perception",
            "Rendering",
            "VR"
        ],
        "keywords": [
            "perception",
            "temporal eye adaptation",
            "glare",
            "virtual reality",
            "scotopic vision",
            "visual acuity reduction",
            "augmented reality"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 1920,
                "image_height": 1080,
                "name": "Luidolt-2020-DA-image.png",
                "type": "image/png",
                "size": 1606001,
                "path": "Publication:Luidolt-2020-DA",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "poster",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "Luidolt-2020-DA-poster.pdf",
                "type": "application/pdf",
                "size": 6507701,
                "path": "Publication:Luidolt-2020-DA",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-poster.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-poster:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "Luidolt-2020-DA-thesis.pdf",
                "type": "application/pdf",
                "size": 15289421,
                "path": "Publication:Luidolt-2020-DA",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/Luidolt-2020-DA-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Luidolt-2020-DA/",
        "__class": "Publication"
    },
    {
        "id": "kroesl-2019-MoD",
        "type_id": "poster",
        "tu_id": null,
        "repositum_id": null,
        "title": "Master of Disaster: Virtual-Reality Response Training in Disaster Management",
        "date": "2019-11-15",
        "abstract": "To be prepared for flooding events, disaster response personnel has to be trained to execute developed action plans.\nWe present a flood response training system which connects an interactive flood simulation with a VR client.\nThe collaborative operator-trainee setup of our system allows a trainee to steer the remote simulation from within the virtual environment, evaluate the effectiveness of deployed protection measures, and compare the results across multiple simulation runs.\nAn operator supervises the trainee's actions from a linked desktop application to provide assistance in complex tasks.\nThe versatility of our system is demonstrated on four different city models.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 1500,
            "image_height": 1200,
            "name": "kroesl-2019-MoD-image.jpg",
            "type": "image/jpeg",
            "size": 1658126,
            "path": "Publication:kroesl-2019-MoD",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-image.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1030,
            1387,
            1266,
            877,
            798
        ],
        "cfp": {
            "name": "SUBMISSIONS – ACM SIGGRAPH VRCAI 2019.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "2196455",
            "orig_name": "SUBMISSIONS – ACM SIGGRAPH VRCAI 2019.pdf",
            "ext": "pdf"
        },
        "date_from": "2019-11-14",
        "date_to": "2019-11-16",
        "doi": "10.1145/3359997.3365729",
        "event": "VRCAI 2019",
        "isbn": "978-1-4503-7002-8",
        "journal": "Proceeding VRCAI '19 The 17th International Conference on Virtual-Reality Continuum and its Applications in Industry Article No. 49",
        "location": "Brisbane, Australia",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "virtual reality",
            "flood simulation",
            "disaster training"
        ],
        "weblinks": [],
        "files": [
            {
                "description": "author's version",
                "filetitle": "extended abstract",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "kroesl-2019-MoD-extended abstract.pdf",
                "type": "application/pdf",
                "size": 4942595,
                "path": "Publication:kroesl-2019-MoD",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-extended abstract.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-extended abstract:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 1500,
                "image_height": 1200,
                "name": "kroesl-2019-MoD-image.jpg",
                "type": "image/jpeg",
                "size": 1658126,
                "path": "Publication:kroesl-2019-MoD",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-image.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "poster",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 8997,
                "image_height": 12720,
                "name": "kroesl-2019-MoD-poster.png",
                "type": "image/png",
                "size": 13621472,
                "path": "Publication:kroesl-2019-MoD",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-poster.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-poster:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "video",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "kroesl-2019-MoD-video.mp4",
                "type": "video/mp4",
                "size": 81933045,
                "path": "Publication:kroesl-2019-MoD",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-video.mp4",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-video:thumb{{size}}.png",
                "video_mp4": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/kroesl-2019-MoD-video:video.mp4"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kroesl-2019-MoD/",
        "__class": "Publication"
    },
    {
        "id": "adolf-2019-jug",
        "type_id": "inproceedings",
        "tu_id": 283044,
        "repositum_id": null,
        "title": "Juggling in VR: Advantages of Immersive Virtual Reality in Juggling Learning",
        "date": "2019-11",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 462,
            "image_height": 279,
            "name": "adolf-2019-jug-.jpg",
            "type": "image/jpeg",
            "size": 112678,
            "path": "Publication:adolf-2019-jug",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1721,
            1720,
            1722,
            378,
            1723,
            1724
        ],
        "booktitle": "25th ACM Symposium on Virtual Reality Software and Technology",
        "cfp": {
            "name": "vrstcfp.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "1742542",
            "orig_name": "vrstcfp.pdf",
            "ext": "pdf"
        },
        "event": "25th ACM Symposium on Virtual Reality Software and Technology",
        "lecturer": [
            378
        ],
        "open_access": "no",
        "pages_from": "1",
        "pages_to": "5",
        "publisher": "ACM",
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [
            {
                "href": "https://dl.acm.org/citation.cfm?id=3364246",
                "caption": "ACM",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 462,
                "image_height": 279,
                "name": "adolf-2019-jug-.jpg",
                "type": "image/jpeg",
                "size": 112678,
                "path": "Publication:adolf-2019-jug",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/",
        "__class": "Publication"
    },
    {
        "id": "panfili-2019-VAVR",
        "type_id": "bachelorthesis",
        "tu_id": null,
        "repositum_id": null,
        "title": "Effects of VR-Displays on Visual Acuity",
        "date": "2019-11",
        "abstract": "The perceived visual acuity (VA) of people in virtual reality (VR), using a head-mounted display (HMD), is not equal to their VA in the real world. The reason for this difference is the reduction of visual acuity in the virtual environment that is caused by various factors, such as the low resolution of the VR display. Based on those circumstances, the capacity of an individual to distinguish small details diminishes visibly. Previous studies regarding eyesight in VR have already verified how the best visual resolution in virtual environments is always lower than the natural vision and therefore this aspect could be seen as a mild vision impairment for the users of an HMD.\nThe goal of this project is to investigate how much the VA is reduced in VR and respectively whether the decrease of VA in VR is perceived similar by everyone or if visual impairments like Myopia, influence the visual perception.\nBased on a previous project, two different tests were implemented with the game engine Unreal Engine 4, a VR version for which an HTC VIVE headset was used, along with a desktop version. These tests were used to investigate the VA of the participant in a user study and the results have been compared to each other in order to find the extent to which visual impairments have an impact on VA.\n",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": true,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1075,
            "image_height": 918,
            "name": "panfili-2019-VAVR-image.png",
            "type": "image/png",
            "size": 637497,
            "path": "Publication:panfili-2019-VAVR",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-image.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1658
        ],
        "date_end": "2019-11",
        "date_start": "2019-04",
        "matrikelnr": "01527648",
        "supervisor": [
            1030
        ],
        "research_areas": [
            "Perception",
            "Rendering",
            "VR"
        ],
        "keywords": [
            "virtual reality",
            "visual acuity"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1075,
                "image_height": 918,
                "name": "panfili-2019-VAVR-image.png",
                "type": "image/png",
                "size": 637497,
                "path": "Publication:panfili-2019-VAVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-image.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "thesis",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "panfili-2019-VAVR-thesis.pdf",
                "type": "application/pdf",
                "size": 4765683,
                "path": "Publication:panfili-2019-VAVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-thesis.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/panfili-2019-VAVR-thesis:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/panfili-2019-VAVR/",
        "__class": "Publication"
    },
    {
        "id": "Goellner_2019-ABC",
        "type_id": "inproceedings",
        "tu_id": 283368,
        "repositum_id": null,
        "title": "Virtual Reality CBRN Defence",
        "date": "2019-10-18",
        "abstract": "Over the past decade, training in virtual reality for military and disaster preparedness has been increasingly recognized as an important adjunct to traditional modalities of real-life drills. However, there are only a few existing solutions that provide immersive virtual reality training and improve learning through an increased amount of presence. In this paper, we present a novel and flexible Virtual Reality (VR) training system for military and first responders that enables realistic multi-user training in large environments. We show how the requirements of peer stakeholders for disaster relief with an explicit focus on CBRN disaster preparedness transfer to the concept, current implementation and future features of our system. The development and integration of multiple technologies allows a wide variety of interaction and collaboration within our immersive system. In addition, we demonstrate the training capabilities of our proposed system with a multi-user training scenario, simulating a CBRN crisis. Results from our technical and user evaluation with 13 experts in CBRN response from the Austrian Armed Forces (National Defence Academy & Competence Center NBC Defence) indicate strong applicability and user acceptance. Over 80% of the participants agreed “much” or “very much” that the presented system can be used to support training for CBRN-crisis preparedness.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1726,
            1727,
            1728,
            1729,
            1731,
            378
        ],
        "booktitle": "Meeting Proceedings of the Simulation and Modelling Group Symposium 171",
        "cfp": {
            "name": "Call_for_Papers.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "409042",
            "orig_name": "Call_for_Papers.pdf",
            "ext": "pdf"
        },
        "event": "Simulation and Modelling Group Symposium 171",
        "lecturer": [
            1731,
            1726
        ],
        "location": "Vienna",
        "open_access": "yes",
        "organization": "NATO S & T",
        "pages_from": "1",
        "pages_to": "25",
        "publisher": "STO",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Virtual Reality",
            "Mixed Reality",
            "first responder training",
            "CBRN",
            "ABC"
        ],
        "weblinks": [
            {
                "href": "https://www.sto.nato.int/publications/STO%20Meeting%20Proceedings/STO-MP-MSG-171/MP-MSG-171-16.pdf",
                "caption": null,
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "Paper",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Goellner_2019-ABC-Paper.pdf",
                "type": "application/pdf",
                "size": 797322,
                "path": "Publication:Goellner_2019-ABC",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/Goellner_2019-ABC/Goellner_2019-ABC-Paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/Goellner_2019-ABC/Goellner_2019-ABC-Paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/Goellner_2019-ABC/",
        "__class": "Publication"
    },
    {
        "id": "kan-2019-dli",
        "type_id": "journalpaper",
        "tu_id": 280958,
        "repositum_id": null,
        "title": "DeepLight: Light Source Estimation for Augmented Reality using Deep Learning",
        "date": "2019-06",
        "abstract": "This paper presents a novel method for illumination estimation from RGB-D images. The main focus of the proposed method is to enhance visual coherence in augmented reality applications by providing accurate and temporally coherent estimates of real illumination. For this purpose, we designed and trained a deep neural network which calculates a dominant light direction from a single RGB-D image. Additionally, we propose a novel method for real-time outlier detection to achieve temporally coherent estimates. Our method for light source estimation in augmented reality was evaluated on the set of real scenes. Our results demonstrate that the neural network can successfully estimate light sources even in scenes which were not seen by the network during training. Moreover, we compared our results with illumination estimates calculated by the state-of-the-art method for illumination estimation. Finally, we demonstrate the applicability of our method on numerous augmented reality scenes.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 627,
            "image_height": 458,
            "name": "kan-2019-dli-.jpg",
            "type": "image/jpeg",
            "size": 220193,
            "path": "Publication:kan-2019-dli",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1720,
            378
        ],
        "cfp": {
            "name": "CGI-Proceeding-CFP-v5.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "314155",
            "orig_name": "CGI-Proceeding-CFP-v5.pdf",
            "ext": "pdf"
        },
        "date_from": "2019",
        "date_to": "2019",
        "doi": "10.1007/s00371-019-01666-x",
        "event": "Computer Graphics International 2019",
        "journal": "The Visual Computer",
        "lecturer": [
            378
        ],
        "number": "6",
        "open_access": "yes",
        "pages_from": "873",
        "pages_to": "883",
        "volume": "35",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Light source estimation",
            "Augmented reality",
            "Photometric registration",
            "Deep learning"
        ],
        "weblinks": [
            {
                "href": "https://link.springer.com/article/10.1007/s00371-019-01666-x",
                "caption": null,
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 627,
                "image_height": 458,
                "name": "kan-2019-dli-.jpg",
                "type": "image/jpeg",
                "size": 220193,
                "path": "Publication:kan-2019-dli",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/",
        "__class": "Publication"
    },
    {
        "id": "schuller_reichl-2019-avt",
        "type_id": "masterthesis",
        "tu_id": 283046,
        "repositum_id": null,
        "title": "Mapping of Realism in Rendering onto Perception of Presence in Augmented Reality",
        "date": "2019-03",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1725
        ],
        "date_end": "2019",
        "date_start": "2017",
        "matrikelnr": "00825849",
        "supervisor": [
            378
        ],
        "research_areas": [
            "Perception",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/schuller_reichl-2019-avt/",
        "__class": "Publication"
    },
    {
        "id": "Prossenitsch_2019-1",
        "type_id": "masterthesis",
        "tu_id": 283432,
        "repositum_id": null,
        "title": "Outdoor Inside-Out Linear Image Sensor Tracking",
        "date": "2019-01-23",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "name": "Prossenitsch_2019-1-.pdf",
            "type": "application/pdf",
            "size": 20665309,
            "path": "Publication:Prossenitsch_2019-1",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/Prossenitsch_2019-1/Prossenitsch_2019-1-.pdf",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/Prossenitsch_2019-1/Prossenitsch_2019-1-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1732
        ],
        "date_end": "2019",
        "date_start": "2017",
        "matrikelnr": "23.01.2019",
        "open_access": "yes",
        "supervisor": [
            378
        ],
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "name": "Prossenitsch_2019-1-.pdf",
                "type": "application/pdf",
                "size": 20665309,
                "path": "Publication:Prossenitsch_2019-1",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/Prossenitsch_2019-1/Prossenitsch_2019-1-.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/Prossenitsch_2019-1/Prossenitsch_2019-1-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/Prossenitsch_2019-1/",
        "__class": "Publication"
    },
    {
        "id": "Kathi-2018-VRB",
        "type_id": "journalpaper",
        "tu_id": null,
        "repositum_id": null,
        "title": "A VR-based user study on the effects of vision impairments on recognition distances of escape-route signs in buildings",
        "date": "2018-04-30",
        "abstract": "In workplaces or publicly accessible buildings, escape routes are signposted according to official norms or international standards that specify distances, angles and areas of interest for the positioning of escape-route signs. In homes for the elderly, in which the residents commonly have degraded mobility and suffer from vision impairments caused by age or eye diseases, the specifications of current norms and standards may be insufficient. Quantifying the effect of symptoms of vision impairments like reduced visual acuity on recognition distances is challenging, as it is cumbersome to find a large number of user study participants who suffer from exactly the same form of vision impairments. Hence, we propose a new methodology for such user studies: By conducting a user study in virtual reality (VR), we are able to use participants with normal or corrected sight and simulate vision impairments graphically. The use of standardized medical eyesight tests in VR allows us to calibrate the visual acuity of all our participants to the same level, taking their respective visual acuity into account. Since we primarily focus on homes for the elderly, we accounted for their often limited mobility by implementing a wheelchair simulation for our VR application.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "image",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 327,
            "image_height": 327,
            "name": "Kathi-2018-VRB-image.JPG",
            "type": "image/jpeg",
            "size": 24451,
            "path": "Publication:Kathi-2018-VRB",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-image.JPG",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-image:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1030,
            1551,
            678,
            1492,
            193,
            1559
        ],
        "date_from": "2018-06-11",
        "date_to": "2018-06-14",
        "doi": "10.1007/s00371-018-1517-7",
        "event": "Computer Graphics International (CGI)",
        "issn": "0178-2789",
        "journal": "The Visual Computer",
        "lecturer": [
            1030
        ],
        "location": "Bintan, Indonesia",
        "number": "6-8",
        "open_access": "yes",
        "pages_from": "911",
        "pages_to": "923",
        "volume": "34",
        "research_areas": [
            "Perception",
            "Rendering",
            "VR"
        ],
        "keywords": [],
        "weblinks": [
            {
                "href": "https://link.springer.com/article/10.1007%2Fs00371-018-1517-7",
                "caption": null,
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "image",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 327,
                "image_height": 327,
                "name": "Kathi-2018-VRB-image.JPG",
                "type": "image/jpeg",
                "size": 24451,
                "path": "Publication:Kathi-2018-VRB",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-image.JPG",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-image:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "Paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "Kathi-2018-VRB-Paper.pdf",
                "type": "application/pdf",
                "size": 1077352,
                "path": "Publication:Kathi-2018-VRB",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-Paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/Kathi-2018-VRB-Paper:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "rend",
            "VRVis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2018/Kathi-2018-VRB/",
        "__class": "Publication"
    },
    {
        "id": "walchhofer-tso",
        "type_id": "poster",
        "tu_id": null,
        "repositum_id": null,
        "title": "Two Sides of the Same Cube: Towards Immersive Dynamic Visualization of Migration Data",
        "date": null,
        "abstract": "Migration is a complex global phenomenon, often presented in a highly sensationalized way by the media. The number of people arriving at a country from a specific origin strongly varies over time. In this poster, we describe two different methods to immersively visualize dynamic migration. We utilize discrete time flattening to create a three-dimensional spacetime-cube-like representation in two separate manifestations: i) A physical sculpture made of transparent media and ii) an AR visualization for smartphones. In future work, we seek to compare how our different manifestations of immersive 3D concepts affect users of the general public.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            5452,
            5449,
            1813
        ],
        "date_from": "2025-11-02",
        "date_to": "2025-11-07",
        "event": "IEEE VIS 2025",
        "research_areas": [
            "Fabrication",
            "VR"
        ],
        "keywords": [],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "poster",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "name": "walchhofer-tso-poster.pdf",
                "type": "application/pdf",
                "size": 8166135,
                "path": "Publication:walchhofer-tso",
                "url": "https://www.cg.tuwien.ac.at/research/publications/ongoing/walchhofer-tso/walchhofer-tso-poster.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/ongoing/walchhofer-tso/walchhofer-tso-poster:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/ongoing/walchhofer-tso/",
        "__class": "Publication"
    }
]
