@inproceedings{stappen_SteFAS,
  title        = {Temporally Stable Content-Adaptive and Spatio-Temporal Shading Rate Assignment for Real-Time Applications},
  author       = {Stappen, Stefan and Unterguggenberger, Johannes and Kerbl, Bernhard and Wimmer, Michael},
  booktitle    = {Pacific Graphics Short Papers, Posters, and Work-in-Progress Papers},
  editor       = {Lee, Sung-Hee and Zollmann, Stefanie and Okabe, Makoto and W{\"u}nsche, Burkhard},
  publisher    = {Eurographics Association},
  organization = {The Eurographics Association},
  event        = {Pacific Graphics 2021},
  location     = {online},
  month        = oct,
  year         = {2021},
  pages        = {65--66},
  isbn         = {978-3-03868-162-5},
  doi          = {10.2312/pg.20211391},
  url          = {https://www.cg.tuwien.ac.at/research/publications/2021/stappen_SteFAS/},
  keywords     = {variable rate shading, temporal antialiasing},
  abstract     = {We propose two novel methods to improve the efficiency and quality of real-time rendering applications: Texel differential-based content-adaptive shading (TDCAS) and spatio-temporally filtered adaptive shading (STeFAS). Utilizing Variable Rate Shading (VRS)---a hardware feature introduced with NVIDIA's Turing micro-architecture---and properties derived during rendering or Temporal Anti-Aliasing (TAA), our techniques adapt the resolution to improve the performance and quality of real-time applications. VRS enables different shading resolution for different regions of the screen during a single render pass. In contrast to other techniques, TDCAS and STeFAS have very little overhead for computing the shading rate. STeFAS enables up to 4x higher rendering resolutions for similar frame rates, or a performance increase of 4x at the same resolution.},
}

@article{unterguggenberger-2021-msh,
  title     = {Conservative Meshlet Bounds for Robust Culling of Skinned Meshes},
  author    = {Unterguggenberger, Johannes and Kerbl, Bernhard and Pernsteiner, Jakob and Wimmer, Michael},
  journal   = {Computer Graphics Forum},
  publisher = {Eurographics Association},
  volume    = {40},
  number    = {7},
  month     = oct,
  year      = {2021},
  pages     = {57--69},
  issn      = {1467-8659},
  doi       = {10.1111/cgf.14401},
  url       = {https://www.cg.tuwien.ac.at/research/publications/2021/unterguggenberger-2021-msh/},
  keywords  = {real-time rendering, meshlet, mesh shader, task shader, view frustum culling, backface culling, Vulkan, vertex skinning, animation, conservative bounds, bounding boxes, Rodrigues' rotation formula, spatio-temporal bounds},
  abstract  = {Following recent advances in GPU hardware development and newly introduced rendering pipeline extensions, the segmentation of input geometry into small geometry clusters---so-called meshlets---has emerged as an important practice for efficient rendering of complex 3D models. Meshlets can be processed efficiently using mesh shaders on modern graphics processing units, in order to achieve streamlined geometry processing in just two tightly coupled shader stages that allow for dynamic workload manipulation in-between. The additional granularity layer between entire models and individual triangles enables new opportunities for fine-grained visibility culling methods. However, in contrast to static models, view frustum and backface culling on a per-meshlet basis for skinned, animated models are difficult to achieve while respecting the conservative spatio-temporal bounds that are required for robust rendering results. In this paper, we describe a solution for computing and exploiting relevant conservative bounds for culling meshlets of models that are animated using linear blend skinning. By enabling visibility culling for animated meshlets, our approach can help to improve rendering performance and alleviate bottlenecks in the notoriously performance- and memory-intensive skeletal animation pipelines of modern real-time graphics applications.},
}

@inproceedings{unterguggenberger-2020-fmvr,
  title        = {Fast Multi-View Rendering for Real-Time Applications},
  author       = {Unterguggenberger, Johannes and Kerbl, Bernhard and Steinberger, Markus and Schmalstieg, Dieter and Wimmer, Michael},
  booktitle    = {Eurographics Symposium on Parallel Graphics and Visualization},
  editor       = {Frey, Steffen and Huang, Jian and Sadlo, Filip},
  organization = {Eurographics},
  event        = {EGPGV 2020},
  location     = {online},
  month        = may,
  year         = {2020},
  pages        = {13--23},
  isbn         = {978-3-03868-107-6},
  doi          = {10.2312/pgv.20201071},
  url          = {https://www.cg.tuwien.ac.at/research/publications/2020/unterguggenberger-2020-fmvr/},
  keywords     = {Real-Time Rendering, Rasterization, Multi-View, OVR_multiview, Geometry Shader, Evaluation},
  abstract     = {Efficient rendering of multiple views can be a critical performance factor for real-time rendering applications. Generating more than one view multiplies the amount of rendered geometry, which can cause a huge performance impact. Minimizing that impact has been a target of previous research and GPU manufacturers, who have started to equip devices with dedicated acceleration units. However, vendor-specific acceleration is not the only option to increase multi-view rendering (MVR) performance. Available graphics API features, shader stages and optimizations can be exploited for improved MVR performance, while generally offering more versatile pipeline configurations, including the preservation of custom tessellation and geometry shaders. In this paper, we present an exhaustive evaluation of MVR pipelines available on modern GPUs. We provide a detailed analysis of previous techniques, hardware-accelerated MVR and propose a novel method, leading to the creation of an MVR catalogue. Our analyses cover three distinct applications to help gain clarity on overall MVR performance characteristics. Our interpretation of the observed results provides a guideline for selecting the most appropriate one for various use cases on different GPU architectures.},
}

@mastersthesis{unterguggenberger-2016-realmar,
  title    = {Realistic Rendering in Mobile Augmented Reality},
  author   = {Unterguggenberger, Johannes},
  school   = {Institute of Computer Graphics and Algorithms, Vienna University of Technology},
  address  = {Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria},
  month    = oct,
  year     = {2016},
  url      = {https://www.cg.tuwien.ac.at/research/publications/2016/unterguggenberger-2016-realmar/},
  keywords = {Augmented Reality, Mobile, Vuforia, Irradiance Mapping, Reflection Mapping, Spherical Harmonics},
  abstract = {Augmented Reality (AR) applications combine a view of a physical, real-world environment with computer-generated objects and effects in real-time. Depending on the application, it is desirable to maximize the visual coherence of the virtual objects compared to the real-world image. To achieve this goal, virtual objects have to be rendered as realistically as possible. This thesis presents an image-based lighting (IBL) technique for realistic rendering of virtual objects on mobile devices which uses lighting information from the real-world environment. In the first step, the presented technique uses a mobile device's camera and motion sensors to capture an omni-directional image of the surrounding in high dynamic range (HDR) and stores it in an environment map. In the second step, the captured environment map is prepared for rendering with different materials by calculating a set of maps. During rendering, the most suitable of these maps are selected for each material and used for shading a virtual object with the specific material. The map which contains diffuse illumination information is called irradiance map, and the maps which contain glossy or specular illumination information are called reflection maps. The calculation of the maps corresponds to a weighted convolution. The weighting is determined by a reflection model which takes the correct amount of incident lighting from all directions into account. How these calculations can be performed efficiently on mobile devices is the main focus of this thesis. Multiple approaches to perform the calculations are described. Their properties, results, strengths and weaknesses are analyzed and optimizations are proposed. We describe three different approaches for the calculation of irradiance and reflection maps in this thesis: the accurate calculation, a MIP-mapping based approximation method, and calculation via spherical harmonics (SH) frequency space. We provide detailed implementation instructions, analyses, and discussions for each of these approaches with regard to the properties and limitations of mobile devices. Furthermore, we describe how the calculated maps can be used with IBL rendering and be combined with established rendering techniques to achieve a high degree of visual coherence of virtual objects in AR scenes. The main novelty of this thesis is its focus on the capabilities of mobile devices. We describe how to do all steps on a single commodity mobile device: From capturing the environment at a certain point in space, to calculating the irradiance and reflection maps, and finally rendering virtual objects using the calculated maps in an AR scene.},
}

@studentproject{unterguggenberger-2010-cmph,
  title    = {Connected meshes for procedural humans},
  author   = {Unterguggenberger, Johannes},
  year     = {2010},
  url      = {https://www.cg.tuwien.ac.at/research/publications/2010/unterguggenberger-2010-cmph/},
  keywords = {shape grammars, connectivity, procedural},
}