[
    {
        "id": "sorger-2021-egonet",
        "type_id": "journalpaper",
        "tu_id": 300416,
        "repositum_id": "20.500.12708/58630",
        "title": "Egocentric Network Exploration for Immersive Analytics",
        "date": "2021-10",
        "abstract": "To exploit the potential of immersive network analytics for engaging and effective exploration, we promote the metaphor of \"egocentrism\", where data depiction and interaction are adapted to the perspective of the user within a 3D network. Egocentrism has the potential to overcome some of the inherent downsides of virtual environments, e.g., visual clutter and cyber-sickness. To investigate the effect of this metaphor on immersive network exploration, we designed and evaluated interfaces of varying degrees of egocentrism. In a user study, we evaluated the effect of these interfaces on visual search tasks, efficiency of network traversal, spatial orientation, as well as cyber-sickness. Results show that a simple egocentric interface considerably improves visual search efficiency and navigation performance, yet does not decrease spatial orientation or increase cyber-sickness. A distorted occlusion-free view of the neighborhood only marginally improves the user's performance. We tie our findings together in an open online tool for egocentric network exploration, providing actionable insights on the benefits of the egocentric network exploration metaphor.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "teaser",
            "main_file": false,
            "use_in_gallery": true,
            "access": "public",
            "image_width": 2921,
            "image_height": 735,
            "name": "sorger-2021-egonet-teaser.png",
            "type": "image/png",
            "size": 2079618,
            "path": "Publication:sorger-2021-egonet",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser.png",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1072,
            1705,
            1720,
            853,
            1110
        ],
        "cfp": {
            "name": "Call for Papers _ Pacific Graphics 2021.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "246798",
            "orig_name": "Call for Papers _ Pacific Graphics 2021.pdf",
            "ext": "pdf"
        },
        "date_from": "2021-10-18",
        "date_to": "2021-10-21",
        "doi": "10.1111/cgf.14417",
        "event": "Pacific Graphics 21",
        "journal": "Computer Graphics Forum",
        "lecturer": [
            1072
        ],
        "location": "Wellington, NZ",
        "open_access": "no",
        "pages": "12",
        "pages_from": "241",
        "pages_to": "252",
        "publisher": "John Wiley and Sons",
        "volume": "40",
        "research_areas": [
            "InfoVis",
            "NetVis",
            "VR"
        ],
        "keywords": [
            "Computer Graphics and Computer-Aided Design"
        ],
        "weblinks": [
            {
                "href": "https://vis.csh.ac.at/egocentricvr/",
                "caption": "online egocentric network",
                "description": "Online tool for egocentric network exploration leveraging insights gathered at the user study",
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "teaser",
                "main_file": false,
                "use_in_gallery": true,
                "access": "public",
                "image_width": 2921,
                "image_height": 735,
                "name": "sorger-2021-egonet-teaser.png",
                "type": "image/png",
                "size": 2079618,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser.png",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-teaser:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "the paper",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "sorger-2021-egonet-the paper.pdf",
                "type": "application/pdf",
                "size": 4211458,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-the paper.pdf",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-the paper:thumb{{size}}.png"
            },
            {
                "description": null,
                "filetitle": "video",
                "main_file": true,
                "use_in_gallery": false,
                "access": "public",
                "name": "sorger-2021-egonet-video.mp4",
                "type": "video/mp4",
                "size": 295211748,
                "path": "Publication:sorger-2021-egonet",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video.mp4",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video:thumb{{size}}.png",
                "video_mp4": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/sorger-2021-egonet-video:video.mp4"
            }
        ],
        "projects_workgroups": [
            "vis"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/sorger-2021-egonet/",
        "__class": "Publication"
    },
    {
        "id": "Honic_2021",
        "type_id": "techreport",
        "tu_id": 299184,
        "repositum_id": "20.500.12708/40378",
        "title": "SCI_BIM Scanning and data capturing for Integrated Resources and Energy Assessment using Building Information Modelling",
        "date": "2021",
        "abstract": "Due to the rapidly increasing consumption of resources and land worldwide, as well as the growing generation of waste, the building stock plays a crucial role not only for the reduction of the energy consumption, but also as a future source of materials (urban mining). However, there is a lack of information on the detailed material composition of the building stock, which is the main obstacle for modelling and predicting its future use. Therefore, the main research question is whether the use of the digital technologies \"Laser Scanning\" and \"Ground Penetrating Radar\" (GPR) as well as a gamification concept, enable to develop and maintain a digital twin (BIM model) which serves as a basis for urban mining.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": null,
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1798,
            1799,
            1904,
            1905,
            1906,
            1907,
            1908,
            1800,
            193,
            1909,
            1910,
            1911,
            240,
            1720
        ],
        "number": "TR-193-02-2021-2",
        "open_access": "yes",
        "pages": "62",
        "research_areas": [],
        "keywords": [],
        "weblinks": [],
        "files": [],
        "projects_workgroups": [
            "rend"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Honic_2021/",
        "__class": "Publication"
    },
    {
        "id": "Kan_Peter-2021-BuildingMonitoring",
        "type_id": "inproceedings",
        "tu_id": 298512,
        "repositum_id": "20.500.12708/58543",
        "title": "Building Information Monitoring via Gamification",
        "date": "2021",
        "abstract": "For efficient facility management it is of high importance to monitor building information, such as energy consumption, indoor temperature, occupancy as well as changes in building structure. In this paper we present a novel methodology for monitoring information about building via gamification. In our approach, the employees of a facility record the states of building elements by playing a competitive mobile game. Traditionally, external sensors are used to automatically collect information about the building usage. In contrast to that, our methodology utilizes personal mobile phones of employees as sensors to identify objects of interest and report their state. Moreover, we propose to use crowdsourcing as a tool for data collection. This way the users of the mobile game are collecting points and compete with each other. At the end of the game the winning team gets the reward. We utilized various gamification strategies to increase motivation of users to collect building data. We extended the traditional 3D BIM model with temporal domain to enable tracking of building changes over time. Finally, we run an experiment with real use case building in which the employees used our system for the duration of three months. We studied our approach and our motivation strategies in a post-experiment study. Our results suggest that gamification can be a viable tool for building information monitoring. Additionally, we note that motivation plays a critical role in the data acquisition by gamification.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "Building information monitoring via gamification - web visualization of measured building properties",
            "filetitle": "model",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 792,
            "image_height": 622,
            "name": "Kan_Peter-2021-BuildingMonitoring-model.jpg",
            "type": "image/jpeg",
            "size": 306766,
            "path": "Publication:Kan_Peter-2021-BuildingMonitoring",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-BuildingMonitoring/Kan_Peter-2021-BuildingMonitoring-model.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-BuildingMonitoring/Kan_Peter-2021-BuildingMonitoring-model:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1720,
            240,
            1798,
            1799
        ],
        "booktitle": "Proceedings of the 16th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - GRAPP",
        "doi": "10.5220/0010288902610270",
        "event": "GRAPP 2021",
        "isbn": "978-989-758-488-6",
        "pages": "10",
        "pages_from": "261",
        "pages_to": "270",
        "publisher": "SciTePress",
        "research_areas": [],
        "keywords": [
            "Gamification",
            "Building Monitoring",
            "Building Information Modeling",
            "Spatial Localization",
            "3D Visualization",
            "Mobile Applications",
            "Crowdsourcing"
        ],
        "weblinks": [
            {
                "href": "https://www.scitepress.org/Papers/2021/102889/102889.pdf",
                "caption": "Paper link",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": "Building information monitoring via gamification - web visualization of measured building properties",
                "filetitle": "model",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 792,
                "image_height": 622,
                "name": "Kan_Peter-2021-BuildingMonitoring-model.jpg",
                "type": "image/jpeg",
                "size": 306766,
                "path": "Publication:Kan_Peter-2021-BuildingMonitoring",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-BuildingMonitoring/Kan_Peter-2021-BuildingMonitoring-model.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-BuildingMonitoring/Kan_Peter-2021-BuildingMonitoring-model:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-BuildingMonitoring/",
        "__class": "Publication"
    },
    {
        "id": "Kan_Peter-2021-MDPI",
        "type_id": "journalpaper_notalk",
        "tu_id": 298527,
        "repositum_id": "20.500.12708/138110",
        "title": "Automatic Interior Design in Augmented Reality Based on Hierarchical Tree of Procedural Rules",
        "date": "2021",
        "abstract": "Augmented reality has a high potential in interior design due to its capability of visualizing numerous prospective designs directly in a target room. In this paper, we present our research on utilization of augmented reality for interactive and personalized furnishing. We propose a new algorithm for automated interior design which generates sensible and personalized furniture configurations. This algorithm is combined with mobile augmented reality system to provide a user with an interactive interior design try-out tool. Personalized design is achieved via a recommender system which uses user preferences and room data as input. We conducted three user studies to explore different aspects of our research. The first study investigated the user preference between augmented reality and on-screen visualization for interactive interior design. In the second user study, we studied the user preference between our algorithm for automated interior design and optimization-based algorithm. Finally, the third study evaluated the probability of sensible design generation by the compared algorithms. The main outcome of our research suggests that augmented reality is viable technology for interactive home furnishing.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "ARDesign",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1001,
            "image_height": 678,
            "name": "Kan_Peter-2021-MDPI-ARDesign.jpg",
            "type": "image/jpeg",
            "size": 373887,
            "path": "Publication:Kan_Peter-2021-MDPI",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1720,
            1872,
            923,
            1873
        ],
        "doi": "10.3390/electronics10030245",
        "journal": "Electronics",
        "number": "3",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "17",
        "volume": "10",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "interior design",
            "augmented reality",
            "3D content generation",
            "user study",
            "personalized recommender"
        ],
        "weblinks": [
            {
                "href": "https://www.mdpi.com/2079-9292/10/3/245",
                "caption": "Paper link",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "ARDesign",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1001,
                "image_height": 678,
                "name": "Kan_Peter-2021-MDPI-ARDesign.jpg",
                "type": "image/jpeg",
                "size": 373887,
                "path": "Publication:Kan_Peter-2021-MDPI",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/Kan_Peter-2021-MDPI-ARDesign:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Kan_Peter-2021-MDPI/",
        "__class": "Publication"
    },
    {
        "id": "Mirzaei_Mohammad-2021",
        "type_id": "inproceedings",
        "tu_id": 298519,
        "repositum_id": "20.500.12708/58545",
        "title": "Multi-modal Spatial Object Localization in Virtual Reality for Deaf and Hard-of-Hearing People",
        "date": "2021",
        "abstract": "Information visualization techniques play an important role in Virtual Reality (VR) because they improve task performance, support cognitive processes, and eventually increase the feeling of immersion. Deaf and Hard-of-Hearing (DHH) persons have special needs for information presentation because they feel and perceive VR environments differently. Therefore, it is necessary to pay attention to requirements about presenting information in VR for this group of users. Previous research showed that adding special features and using haptic methods helps DHH persons to do VR tasks better. In this paper, we propose a novel Omni-directional particle visualization method and also evaluate multi-modal presentation methods in VR for DHH persons, such as audio, visual, haptic, and a combination of them (AVH). Additionally, we compare the results with the results of persons without hearing problems. The methods for information presentation in our study focus on spatial object localization in VR. Our user studies show that both DHH persons and persons without hearing problems were able to do VR tasks significantly faster using AVH. Also, we found out that DHH persons can do visual-related VR tasks faster than persons without hearing problems by using our new proposed visualization method. Our results suggest that the benefits of using audio among persons without hearing problems and the benefits of using vision among DHH persons cause an interesting balance in the results of AVH between both groups. Finally, our qualitative and quantitative evaluation indicates that both groups of participants preferred and enjoyed AVH modality more than other modalities.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "directional_visualization",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 755,
            "image_height": 650,
            "name": "Mirzaei_Mohammad-2021-directional_visualization.jpg",
            "type": "image/jpeg",
            "size": 217348,
            "path": "Publication:Mirzaei_Mohammad-2021",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021/Mirzaei_Mohammad-2021-directional_visualization.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021/Mirzaei_Mohammad-2021-directional_visualization:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1803,
            1720,
            378
        ],
        "booktitle": "IEEE Virtual Reality and 3D User Interfaces (VR)",
        "doi": "10.1109/VR50410.2021.00084",
        "event": "IEEE VR 2021",
        "pages": "9",
        "pages_from": "588",
        "pages_to": "596",
        "publisher": "IEEE Computer Society",
        "research_areas": [],
        "keywords": [
            "Virtual Reality",
            "Information Presentation",
            "Visualization Techniques",
            "Spatial Object Localization",
            "Deaf and Hard-of-Hearing"
        ],
        "weblinks": [],
        "files": [
            {
                "description": null,
                "filetitle": "directional_visualization",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 755,
                "image_height": 650,
                "name": "Mirzaei_Mohammad-2021-directional_visualization.jpg",
                "type": "image/jpeg",
                "size": 217348,
                "path": "Publication:Mirzaei_Mohammad-2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021/Mirzaei_Mohammad-2021-directional_visualization.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021/Mirzaei_Mohammad-2021-directional_visualization:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021/",
        "__class": "Publication"
    },
    {
        "id": "Mirzaei_Mohammad-2021-HeadUp",
        "type_id": "inproceedings",
        "tu_id": 298516,
        "repositum_id": "20.500.12708/58544",
        "title": "Head Up Visualization of Spatial Sound Sources in Virtual Reality for Deaf and Hard-of-Hearing People",
        "date": "2021",
        "abstract": "This paper presents a novel method for the visualization of 3D spatial sounds in Virtual Reality (VR) for Deaf and Hard-of-Hearing (DHH) people. Our method enhances traditional VR devices with additional haptic and visual feedback, which aids spatial sound localization. The proposed system automatically analyses 3D sound from VR application, and it indicates the direction of sound sources to a user by two Vibro-motors and two Light-Emitting Diodes (LEDs). The benefit of automatic sound analysis is that our method can be used in any VR application without modifying the application itself. We evaluated the proposed method for 3D spatial sound visualization in a user study. Additionally, the conducted user study investigated which condition (corresponding to different senses) leads to faster performance in 3D sound localization task. For this purpose, we compared three conditions: haptic feedback only, LED feedback only, combined haptic and LED feedback. Our study results suggest that DHH participants could complete sound-related VR tasks significantly faster using LED and haptic+LED conditions in comparison to only haptic feedback. The presented method for spatial sound visualization can be directly used to enhance VR applications for use by DHH persons, and the results of our user study can serve as guidelines for the future design of accessible VR systems.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "Head up display extended with a LEDs (on both sides) for indicating direction of incoming sound",
            "filetitle": "head-up",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 595,
            "image_height": 362,
            "name": "Mirzaei_Mohammad-2021-HeadUp-head-up.jpg",
            "type": "image/jpeg",
            "size": 210940,
            "path": "Publication:Mirzaei_Mohammad-2021-HeadUp",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021-HeadUp/Mirzaei_Mohammad-2021-HeadUp-head-up.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021-HeadUp/Mirzaei_Mohammad-2021-HeadUp-head-up:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1803,
            1720,
            378
        ],
        "booktitle": "IEEE Virtual Reality and 3D User Interfaces (VR)",
        "doi": "10.1109/VR50410.2021.00083",
        "event": "IEEE VR 2021",
        "pages": "6",
        "pages_from": "582",
        "pages_to": "587",
        "publisher": "IEEE Computer Society",
        "research_areas": [],
        "keywords": [
            "Virtual Reality",
            "Haptic",
            "Vision",
            "Sound Localization",
            "Deaf",
            "Hard-of-Hearing"
        ],
        "weblinks": [],
        "files": [
            {
                "description": "Head up display extended with a LEDs (on both sides) for indicating direction of incoming sound",
                "filetitle": "head-up",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 595,
                "image_height": 362,
                "name": "Mirzaei_Mohammad-2021-HeadUp-head-up.jpg",
                "type": "image/jpeg",
                "size": 210940,
                "path": "Publication:Mirzaei_Mohammad-2021-HeadUp",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021-HeadUp/Mirzaei_Mohammad-2021-HeadUp-head-up.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021-HeadUp/Mirzaei_Mohammad-2021-HeadUp-head-up:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohammad-2021-HeadUp/",
        "__class": "Publication"
    },
    {
        "id": "Mirzaei_Mohhamad-2021",
        "type_id": "journalpaper_notalk",
        "tu_id": 298522,
        "repositum_id": "20.500.12708/138107",
        "title": "Effects of Using Vibrotactile Feedback on Sound Localization by Deaf and Hard-of-Hearing People in Virtual Environments",
        "date": "2021",
        "abstract": "Sound source localization is important for spatial awareness and immersive Virtual Reality (VR) experiences. Deaf and Hard-of-Hearing (DHH) persons have limitations in completing sound-related VR tasks efficiently because they perceive audio information differently. This paper presents and evaluates a special haptic VR suit that helps DHH persons efficiently complete sound-related VR tasks. Our proposed VR suit receives sound information from the VR environment wirelessly and indicates the direction of the sound source to the DHH user by using vibrotactile feedback. Our study suggests that using different setups of the VR suit can significantly improve VR task completion times compared to not using a VR suit. Additionally, the results of mounting haptic devices on different positions of users' bodies indicate that DHH users can complete a VR task significantly faster when two vibro-motors are mounted on their arms and ears compared to their thighs. Our quantitative and qualitative analysis demonstrates that DHH persons prefer using the system without the VR suit and prefer mounting vibro-motors in their ears. In an additional study, we did not find a significant difference in task completion time when using four vibro-motors with the VR suit compared to using only two vibro-motors in users' ears without the VR suit.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "HapticSuit",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 670,
            "image_height": 697,
            "name": "Mirzaei_Mohhamad-2021-HapticSuit.jpg",
            "type": "image/jpeg",
            "size": 212299,
            "path": "Publication:Mirzaei_Mohhamad-2021",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohhamad-2021/Mirzaei_Mohhamad-2021-HapticSuit.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohhamad-2021/Mirzaei_Mohhamad-2021-HapticSuit:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1803,
            1720,
            378
        ],
        "doi": "10.3390/electronics10222794",
        "journal": "Electronics",
        "number": "22",
        "open_access": "yes",
        "pages_from": "1",
        "pages_to": "14",
        "volume": "10",
        "research_areas": [],
        "keywords": [
            "virtual reality",
            "haptic feedback",
            "tactile sensation",
            "sound source localization",
            "deaf and hard-of-hearing"
        ],
        "weblinks": [
            {
                "href": "https://www.mdpi.com/2079-9292/10/22/2794",
                "caption": "Paper link",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "HapticSuit",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 670,
                "image_height": 697,
                "name": "Mirzaei_Mohhamad-2021-HapticSuit.jpg",
                "type": "image/jpeg",
                "size": 212299,
                "path": "Publication:Mirzaei_Mohhamad-2021",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohhamad-2021/Mirzaei_Mohhamad-2021-HapticSuit.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohhamad-2021/Mirzaei_Mohhamad-2021-HapticSuit:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Mirzaei_Mohhamad-2021/",
        "__class": "Publication"
    },
    {
        "id": "Reisinger_Julia-2021-parametricscript",
        "type_id": "inproceedings",
        "tu_id": 296846,
        "repositum_id": "20.500.12708/62967",
        "title": "Framework proposal for automated generation of production layout scenarios: A parametric design technique to connect production planning and structural industrial building design",
        "date": "2021",
        "abstract": "To increase the flexibility and expandability of production plants the focus needs to be on a coherent planning of the production layout and building systems. The frequent reconfiguration of production layouts bears challenges on the load-bearing structure of industrial buildings, decreasing the building service life due to rescheduling or demolition. Currently there is no method established to integrate production layout planning into structural building design processes. In this paper, a novel parametric generative design method for automated production layout generation and optimisation (PLGO) is presented, producing layout scenarios to be respected in structural building design. Results of a state-of-the-art analysis and a case study methodology are combined to develop a novel concept of integrated production cubes (IPC). The IPC concept is translated into a parametric PLGO framework, which is tested on a pilot-project of a food-and hygiene production facility and the defined objectives and constraints are validated.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "Production layout",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 286,
            "image_height": 226,
            "name": "Reisinger_Julia-2021-parametricscript-Production layout.jpg",
            "type": "image/jpeg",
            "size": 61797,
            "path": "Publication:Reisinger_Julia-2021-parametricscript",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger_Julia-2021-parametricscript/Reisinger_Julia-2021-parametricscript-Production layout.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger_Julia-2021-parametricscript/Reisinger_Julia-2021-parametricscript-Production layout:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1874,
            1875,
            1799,
            1720,
            1876
        ],
        "booktitle": "EG-ICE 2021 Workshop on Intelligent Computing in Engineering",
        "date_from": "2021-06-30",
        "date_to": "2021-07-02",
        "doi": "10.14279/depositonce-12021",
        "event": "28th EG-ICE International Workshop on Intelligent Computing in Engineering 2021",
        "isbn": "978-3-7983-3212-6",
        "location": "Berlin, Deutschland",
        "pages": "12",
        "pages_from": "22",
        "pages_to": "33",
        "publisher": "Universitätsverlag der TU Berlin",
        "research_areas": [],
        "keywords": [
            "Parametric modelling",
            "multi-objective optimisation",
            "production layout planning",
            "automated layout generation",
            "generative design",
            "integrated industrial building design"
        ],
        "weblinks": [
            {
                "href": "https://publik.tuwien.ac.at/files/publik_296846.pdf",
                "caption": "Paper link",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "Production layout",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 286,
                "image_height": 226,
                "name": "Reisinger_Julia-2021-parametricscript-Production layout.jpg",
                "type": "image/jpeg",
                "size": 61797,
                "path": "Publication:Reisinger_Julia-2021-parametricscript",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger_Julia-2021-parametricscript/Reisinger_Julia-2021-parametricscript-Production layout.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger_Julia-2021-parametricscript/Reisinger_Julia-2021-parametricscript-Production layout:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "BimFlexi"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger_Julia-2021-parametricscript/",
        "__class": "Publication"
    },
    {
        "id": "Reisinger-2021-JOBE",
        "type_id": "journalpaper_notalk",
        "tu_id": 298813,
        "repositum_id": "20.500.12708/138206",
        "title": "Integrated multi-objective evolutionary optimization of production layout scenarios for parametric structural design of flexible industrial buildings",
        "date": "2021",
        "abstract": "Due to product individualization, customization and rapid technological advances in manufacturing, production systems are faced with frequent reconfiguration and expansion. Industrial buildings that allow changing production scenarios require flexible load-bearing structures and a coherent planning of the production layout and building systems. Yet, current production planning and structural building design are mostly sequential and the data and models lack interoperability. In this paper, a novel parametric evolutionary design method for automated production layout generation and optimization (PLGO) is presented, producing layout scenarios to be respected in structural building design. Results of a state-of-the-art analysis and a case study are combined to develop a novel concept of integrated production cubes and the design space for PLGO as basis for a parametric production layout design method. The integrated production cubes concept is then translated into a parametric PLGO framework, which is tested on a pilot-project of a hygiene production facility to evaluate the framework and validate the defined constraints and objectives. Results suggest that our framework can produce feasible production layout scenarios which respect flexibility and building requirements. In future research the design process will be extended by the development of a multi-objective evolutionary optimization process for industrial buildings to provide flexible building solutions that can accommodate a selection of several prioritized production layouts.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "IPC",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 616,
            "image_height": 523,
            "name": "Reisinger-2021-JOBE-IPC.jpg",
            "type": "image/jpeg",
            "size": 146214,
            "path": "Publication:Reisinger-2021-JOBE",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger-2021-JOBE/Reisinger-2021-JOBE-IPC.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger-2021-JOBE/Reisinger-2021-JOBE-IPC:thumb{{size}}.png"
        },
        "sync_repositum_override": "projects",
        "repositum_presentation_id": null,
        "authors": [
            1874,
            1875,
            1799,
            1720,
            1876,
            378
        ],
        "doi": "10.1016/j.jobe.2021.103766",
        "issn": "2352-7102",
        "journal": "Journal of Building Engineering",
        "number": "103766",
        "pages_from": "1",
        "pages_to": "18",
        "volume": "46",
        "research_areas": [],
        "keywords": [
            "Parametric modelling",
            "Multi-objective optimization",
            "Layout planning",
            "Automated production layout generation",
            "Evolutionary algorithm",
            "Integrated industrial building design"
        ],
        "weblinks": [
            {
                "href": "https://www.sciencedirect.com/science/article/pii/S2352710221016247?via%3Dihub",
                "caption": "Paper link",
                "description": null,
                "main_file": 0
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "IPC",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 616,
                "image_height": 523,
                "name": "Reisinger-2021-JOBE-IPC.jpg",
                "type": "image/jpeg",
                "size": 146214,
                "path": "Publication:Reisinger-2021-JOBE",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger-2021-JOBE/Reisinger-2021-JOBE-IPC.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger-2021-JOBE/Reisinger-2021-JOBE-IPC:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "BimFlexi"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2021/Reisinger-2021-JOBE/",
        "__class": "Publication"
    },
    {
        "id": "Sebernegg2020",
        "type_id": "techreport",
        "tu_id": null,
        "repositum_id": "20.500.12708/40238",
        "title": "Motion Similarity Modeling - A State of the Art Report",
        "date": "2020-08",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "MoSiMo",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1278,
            "image_height": 887,
            "name": "Sebernegg2020-MoSiMo.jpg",
            "type": "image/jpeg",
            "size": 148448,
            "path": "Publication:Sebernegg2020",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1804,
            1720,
            378
        ],
        "number": "TR-193-02-2020-5",
        "open_access": "yes",
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [
            {
                "href": "https://arxiv.org/abs/2008.05872",
                "caption": "arXiv",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "MoSiMo",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1278,
                "image_height": 887,
                "name": "Sebernegg2020-MoSiMo.jpg",
                "type": "image/jpeg",
                "size": 148448,
                "path": "Publication:Sebernegg2020",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/Sebernegg2020-MoSiMo:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Sebernegg2020/",
        "__class": "Publication"
    },
    {
        "id": "Mirzaei_Mohammadreza_2020-EVR",
        "type_id": "journalpaper",
        "tu_id": 291029,
        "repositum_id": "20.500.12708/140902",
        "title": "EarVR: Using Ear Haptics in Virtual Reality for Deaf and Hard-of-Hearing People",
        "date": "2020-05",
        "abstract": "Virtual Reality (VR) has a great potential to improve skills of Deaf and Hard-of-Hearing (DHH) people. Most VR applications and devices are designed for persons without hearing problems. Therefore, DHH persons have many limitations when using VR. Adding special features in a VR environment, such as subtitles, or haptic devices will help them. Previously, it was necessary to design a special VR environment for DHH persons. We introduce and evaluate a new prototype called \"EarVR\" that can be mounted on any desktop or mobile VR Head-Mounted Display (HMD). EarVR analyzes 3D sounds in a VR environment and locates the direction of the sound source that is closest to a user. It notifies the user about the sound direction using two vibro-motors placed on the user's ears. EarVR helps DHH persons to complete sound-based VR tasks in any VR application with 3D audio and a mute option for background music. Therefore, DHH persons can use all VR applications with 3D audio, not only those applications designed for them. Our user study shows that DHH participants were able to complete a simple VR task significantly faster with EarVR than without. The completion time of DHH participants was very close to participants without hearing problems. Also, it shows that DHH participants were able to finish a complex VR task with EarVR, while without it, they could not finish the task even once. Finally, our qualitative and quantitative evaluation among DHH participants indicates that they preferred to use EarVR and it encouraged them to use VR technology more.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": "EarVR",
            "filetitle": "EarVR",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 1110,
            "image_height": 602,
            "name": "Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
            "type": "image/jpeg",
            "size": 499544,
            "path": "Publication:Mirzaei_Mohammadreza_2020-EVR",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1803,
            1720,
            378
        ],
        "cfp": {
            "name": "cfp2020.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "516819",
            "orig_name": "cfp2020.pdf",
            "ext": "pdf"
        },
        "date_from": "2020-03-22",
        "date_to": "2020-03-26",
        "doi": "10.1109/TVCG.2020.2973441",
        "event": "IEEE  VR 2021",
        "journal": "IEEE Transactions on Visualization and Computer Graphics",
        "lecturer": [
            378
        ],
        "number": "05",
        "open_access": "no",
        "pages_from": "2084",
        "pages_to": "2093",
        "volume": "26",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Handicapped Aids",
            "Haptic Interfaces",
            "Helmet Mounted Displays",
            "Virtual Reality",
            "3 D Sounds",
            "3 D Audio",
            "Deaf And Hard Of Hearing People",
            "Head Mounted Display",
            "VR Application",
            "Ear VR",
            "VR Technology",
            "Haptic Devices",
            "DHH Persons",
            "Hearing Problems",
            "VR Apps."
        ],
        "weblinks": [
            {
                "href": "https://www.computer.org/csdl/journal/tg/2020/05/08998298/1hrXce2Kmhq",
                "caption": "TVCG",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": "EarVR",
                "filetitle": "EarVR",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 1110,
                "image_height": 602,
                "name": "Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
                "type": "image/jpeg",
                "size": 499544,
                "path": "Publication:Mirzaei_Mohammadreza_2020-EVR",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/Mirzaei_Mohammadreza_2020-EVR-EarVR:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2020/Mirzaei_Mohammadreza_2020-EVR/",
        "__class": "Publication"
    },
    {
        "id": "adolf-2019-jug",
        "type_id": "inproceedings",
        "tu_id": 283044,
        "repositum_id": null,
        "title": "Juggling in VR: Advantages of Immersive Virtual Reality in Juggling Learning",
        "date": "2019-11",
        "abstract": null,
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 462,
            "image_height": 279,
            "name": "adolf-2019-jug-.jpg",
            "type": "image/jpeg",
            "size": 112678,
            "path": "Publication:adolf-2019-jug",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1721,
            1720,
            1722,
            378,
            1723,
            1724
        ],
        "booktitle": "25th ACM Symposium on Virtual Reality Software and Technology",
        "cfp": {
            "name": "vrstcfp.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "1742542",
            "orig_name": "vrstcfp.pdf",
            "ext": "pdf"
        },
        "event": "25th ACM Symposium on Virtual Reality Software and Technology",
        "lecturer": [
            378
        ],
        "open_access": "no",
        "pages_from": "1",
        "pages_to": "5",
        "publisher": "ACM",
        "research_areas": [
            "VR"
        ],
        "keywords": [],
        "weblinks": [
            {
                "href": "https://dl.acm.org/citation.cfm?id=3364246",
                "caption": "ACM",
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 462,
                "image_height": 279,
                "name": "adolf-2019-jug-.jpg",
                "type": "image/jpeg",
                "size": 112678,
                "path": "Publication:adolf-2019-jug",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/adolf-2019-jug-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/adolf-2019-jug/",
        "__class": "Publication"
    },
    {
        "id": "kan-2019-dli",
        "type_id": "journalpaper",
        "tu_id": 280958,
        "repositum_id": null,
        "title": "DeepLight: Light Source Estimation for Augmented Reality using Deep Learning",
        "date": "2019-06",
        "abstract": "This paper presents a novel method for illumination estimation from RGB-D images. The main focus of the proposed method is to enhance visual coherence in augmented reality applications by providing accurate and temporally coherent estimates of real illumination. For this purpose, we designed and trained a deep neural network which calculates a dominant light direction from a single RGB-D image. Additionally, we propose a novel method for real-time outlier detection to achieve temporally coherent estimates. Our method for light source estimation in augmented reality was evaluated on the set of real scenes. Our results demonstrate that the neural network can successfully estimate light sources even in scenes which were not seen by the network during training. Moreover, we compared our results with illumination estimates calculated by the state-of-the-art method for illumination estimation. Finally, we demonstrate the applicability of our method on numerous augmented reality scenes.",
        "authors_et_al": false,
        "substitute": null,
        "main_image": {
            "description": null,
            "filetitle": "",
            "main_file": false,
            "use_in_gallery": false,
            "access": "public",
            "image_width": 627,
            "image_height": 458,
            "name": "kan-2019-dli-.jpg",
            "type": "image/jpeg",
            "size": 220193,
            "path": "Publication:kan-2019-dli",
            "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-.jpg",
            "thumb_image_sizes": [
                16,
                64,
                100,
                175,
                300,
                600
            ],
            "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-:thumb{{size}}.png"
        },
        "sync_repositum_override": null,
        "repositum_presentation_id": null,
        "authors": [
            1720,
            378
        ],
        "cfp": {
            "name": "CGI-Proceeding-CFP-v5.pdf",
            "type": "application/pdf",
            "error": "0",
            "size": "314155",
            "orig_name": "CGI-Proceeding-CFP-v5.pdf",
            "ext": "pdf"
        },
        "date_from": "2019",
        "date_to": "2019",
        "doi": "10.1007/s00371-019-01666-x",
        "event": " Computer Graphics International 2019",
        "journal": "The Visual Computer",
        "lecturer": [
            378
        ],
        "number": "6",
        "open_access": "yes",
        "pages_from": "873",
        "pages_to": "883",
        "volume": "35",
        "research_areas": [
            "VR"
        ],
        "keywords": [
            "Light source estimation",
            "Augmented reality",
            "Photometric registration",
            "Deep learning"
        ],
        "weblinks": [
            {
                "href": "https://link.springer.com/article/10.1007/s00371-019-01666-x",
                "caption": null,
                "description": null,
                "main_file": 1
            }
        ],
        "files": [
            {
                "description": null,
                "filetitle": "",
                "main_file": false,
                "use_in_gallery": false,
                "access": "public",
                "image_width": 627,
                "image_height": 458,
                "name": "kan-2019-dli-.jpg",
                "type": "image/jpeg",
                "size": 220193,
                "path": "Publication:kan-2019-dli",
                "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-.jpg",
                "thumb_image_sizes": [
                    16,
                    64,
                    100,
                    175,
                    300,
                    600
                ],
                "thumb_url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/kan-2019-dli-:thumb{{size}}.png"
            }
        ],
        "projects_workgroups": [
            "vr"
        ],
        "url": "https://www.cg.tuwien.ac.at/research/publications/2019/kan-2019-dli/",
        "__class": "Publication"
    }
]
