@phdthesis{knecht_2013_RSM, title = "Reciprocal Shading for Mixed Reality", author = "Martin Knecht", year = "2013", abstract = "Reciprocal shading for mixed reality aims to integrate virtual objects into real environments in a way that they are in the ideal case indistinguishable from real objects. It is therefore an attractive technology for architectural visualizations, product visualizations and for cultural heritage sites, where virtual objects should be seamlessly merged with real ones. Due to the improved performance of recent graphics hardware, real-time global illumination algorithms are feasible for mixed-reality applications, and thus more and more researchers address realistic rendering for mixed reality. The goal of this thesis is to provide algorithms which improve the visual plausibility of virtual objects in mixed-reality applications. Our contributions are as follows: First, we present five methods to reconstruct the real surrounding environment. In particular, we present two methods for geometry reconstruction, a method for material estimation at interactive frame rates and two methods to reconstruct the color mapping characteristics of the video see-through camera. Second, we present two methods to improve the visual appearance of virtual objects. The first, called differential instant radiosity, combines differential rendering with a global illumination method called instant radiosity to simulate reciprocal shading effects such as shadowing and indirect illumination between real and virtual objects. The second method focuses on the visual plausible rendering of reflective and refractive objects. The high-frequency lighting effects caused by these objects are also simulated with our method. The third part of this thesis presents two user studies which evaluate the influence of the presented rendering methods on human perception. 
The first user study measured task performance with respect to the rendering mode, and the second user study was set up as a web survey where participants had to choose which of two presented images, showing mixed-reality scenes, they preferred.", month = dec, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/knecht_2013_RSM/", } @bachelorsthesis{rasch_martina-2013-HDRImage, title = "HDR Image Acquisition for Augmented Reality", author = "Martina Rasch", year = "2013", abstract = "In this thesis I present a method for calculating high dynamic range images in a mixed reality system. Cameras and monitors usually have a lower dynamic range than we encounter in the real world, e.g. the sun. While pictures have a maximal contrast of 1:500, real world scenes often have a contrast of 1:100 000. An image taken of a scene with a higher dynamic range than our camera can capture will have regions that are too bright or too dark. With a higher exposure time more details will be visible in dark regions and with a lower exposure time more details will be visible in bright regions. Since our camera cannot create an image preserving details in both dark and bright regions we have to calculate one using the images our camera can actually produce. The method described in this thesis is based on the work of Debevec and Malik. 
It takes several images taken with different exposure times and combines them to a high dynamic range image, leading to a better viewing experience in our RESHADE framework, a mixed reality framework, for which this method was implemented.", month = sep, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", keywords = "mixed reality, high dynamic range image", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/rasch_martina-2013-HDRImage/", } @article{knecht_martin_2013_ReflRefrObjsMR, title = "Reflective and Refractive Objects for Mixed Reality", author = "Martin Knecht and Christoph Traxler and Christoph Winklhofer and Michael Wimmer", year = "2013", abstract = "In this paper, we present a novel rendering method which integrates reflective or refractive objects into a differential instant radiosity (DIR) framework usable for mixed-reality (MR) applications. This kind of objects are very special from the light interaction point of view, as they reflect and refract incident rays. Therefore they may cause high-frequency lighting effects known as caustics. Using instant-radiosity (IR) methods to approximate these high-frequency lighting effects would require a large amount of virtual point lights (VPLs) and is therefore not desirable due to real-time constraints. Instead, our approach combines differential instant radiosity with three other methods. One method handles more accurate reflections compared to simple cubemaps by using impostors. Another method is able to calculate two refractions in real-time, and the third method uses small quads to create caustic effects. Our proposed method replaces parts in light paths that belong to reflective or refractive objects using these three methods and thus tightly integrates into DIR. 
In contrast to previous methods which introduce reflective or refractive objects into MR scenarios, our method produces caustics that also emit additional indirect light. The method runs at real-time frame rates, and the results show that reflective and refractive objects with caustics improve the overall impression for MR scenarios.", month = mar, journal = "IEEE Transactions on Visualization and Computer Graphics (Proceedings of IEEE VR 2013)", volume = "19", number = "4", issn = "1077-2626", pages = "576--582", keywords = "Mixed Reality, Caustics, Reflections, Refractions", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/knecht_martin_2013_ReflRefrObjsMR/", } @bachelorsthesis{jahrmann_klemens_KFR, title = "Kinect Fusion - Reconstruction", author = "Klemens Jahrmann", year = "2013", abstract = "The procedure of collecting 3D data via an input device and processing it to a virtual 3D model is called 3D reconstruction. It is a widely used technique in visual computing, since modern applications like games or visualizations tend to be more and more photo-realistic leading to high costs in content creation. By using 3D reconstruction high quality geometry can be generated out of real objects. However to obtain good reconstructions special hardware is needed which is very expensive. Since Microsoft released the Kinect camera, which has a depth sensor in addition to the RGB-sensor, a quite cheap hardware is available that is able to extract 3D data of its surroundings. KinectFusion also developed by Microsoft is a technique that uses the Kinect camera for 3D reconstruction in real-time. In order to achieve real-time speed the algorithm is executed almost exclusively on the graphics card. Each frame the algorithm first gathers the information from the Kinect and processes it. After that it measures the camera’s position in space and fills a 3D volume with surface data. Finally a raycasting algorithm is used to extract isosurfaces out of the volume. 
During the work on the thesis we implemented the KinectFusion algorithm inside the RESHADE framework. The results and the implementation itself are presented as part of the thesis.", month = feb, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", keywords = "Reconstruction, Kinect Fusion", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/jahrmann_klemens_KFR/", } @studentproject{laager_florian-2013-camr, title = "Camera Artifacts in Mixed Reality", author = "Florian Laager", year = "2013", abstract = "Simulating camera artifacts for better immersion of virtual objects in a real environment", keywords = "Mixed Reality, Camera Artifacts", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/laager_florian-2013-camr/", } @mastersthesis{winklhofer_christoph-2013-RRMR, title = "Reflections, Refractions and Caustics in a Mixed-Reality Environment", author = "Christoph Winklhofer", year = "2013", abstract = "In a mixed-reality environment virtual objects are merged into a real scene. Such an augmentation with virtual objects offers great possibilities to present content in new and innovative ways. The visual appearance of these virtual objects depends on a plausible lighting simulation. Otherwise, virtual objects look artificial and out of place, which destroys the overall impression of the perceived scene. Reflective and refractive objects are an inherent part of our physical environment. Accordingly, virtual objects of this type also enhance the overall impression and scope of a mixed-reality application. Many mixed-reality systems still neglect them: Such objects require a complex light simulation that is hard to embed in a mixed-reality system, which demands real-time frame rates to handle the user interaction. This thesis describes the integration of reflective and refractive objects in a mixed-reality environment. 
The aim is to create a realistic light distribution that simulates reflections and refractions between real and virtual objects. Another important aspect for a believable perception are caustics, light focusing due to the scattering from reflective or refractive objects. Until recently, this effect was simply excluded in the lighting simulation of mixed-reality systems. The proposed rendering method extends differential instant radiosity with three other image space rendering techniques capable to handle reflections, refractions and caustics in real time. By combining these techniques, our method successfully simulates the various lighting effects from reflective and refractive objects and is able to handle user interactions at interactive to realtime frame rates. This offers a practicable possibility to greatly improve the visual quality of a mixed-reality environment.", address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", keywords = "Caustics, Reflections, Mixed Reality, Refractions", URL = "https://www.cg.tuwien.ac.at/research/publications/2013/winklhofer_christoph-2013-RRMR/", } @bachelorsthesis{celarek_adam-2012-rrmro, title = "Merging Ray Tracing and Rasterization in Mixed Reality", author = "Adam Celarek", year = "2012", abstract = "In mixed reality, virtual objects are inserted into a video stream of a real environment. This technique can be used for many applications including marketing, simulations and cultural heritage. Therefore it is important that the images look plausible. Many applications also have real time constraints. With traditional rasterization it is difficult to create realistic reflections and refractions. In ray tracing on the other hand this is a trivial task, but rendering is slow. The solution described in this work uses the graphics card for speeding up ray tracing. 
Additionally it employs a rasterizer for diffuse surfaces and only traces rays if there is a reflective or refractive surface visible. This works by creating a ray tracing mask using the fast rasterizer in a first step. It holds true for reflective or refractive surfaces and false otherwise. Then all diffuse objects are drawn using the rasterizer. Finally rays are traced on each pixel which is masked as reflective or refractive surface by the ray tracing mask. These rays produce secondary rays which can hit a diffuse surface eventually. In this case the ray tracer takes over the shading. Results show, that our hybrid rendering method allows high quality reflections and refractions while still having interactive frame rates in mixed reality scenarios.", month = nov, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", keywords = "Refraction, OptiX, Augmented Reality, Reflection", URL = "https://www.cg.tuwien.ac.at/research/publications/2012/celarek_adam-2012-rrmro/", } @article{knecht_martin_2012_RSMR, title = "Reciprocal Shading for Mixed Reality", author = "Martin Knecht and Christoph Traxler and Oliver Mattausch and Michael Wimmer", year = "2012", abstract = "In this paper we present a novel plausible rendering method for mixed reality systems, which is useful for many real-life application scenarios, like architecture, product visualization or edutainment. To allow virtual objects to seamlessly blend into the real environment, the real lighting conditions and the mutual illumination effects between real and virtual objects must be considered, while maintaining interactive frame rates. The most important such effects are indirect illumination and shadows cast between real and virtual objects. Our approach combines Instant Radiosity and Differential Rendering. 
In contrast to some previous solutions, we only need to render the scene once in order to find the mutual effects of virtual and real scenes. In addition, we avoid artifacts like double shadows or inconsistent color bleeding which appear in previous work. The dynamic real illumination is derived from the image stream of a fish-eye lens camera. The scene gets illuminated by virtual point lights, which use imperfect shadow maps to calculate visibility. A sufficiently fast scene reconstruction is done at run-time with Microsoft's Kinect sensor. Thus a time-consuming manual pre-modeling step of the real scene is not necessary. Our results show that the presented method highly improves the illusion in mixed-reality applications and significantly diminishes the artificial look of virtual objects superimposed onto real scenes.", month = nov, issn = "0097-8493", journal = "Computers & Graphics", number = "7", volume = "36", pages = "846--856", keywords = "Differential rendering, Reconstruction, Instant radiosity, Microsoft Kinect, Real-time global illumination, Mixed reality", URL = "https://www.cg.tuwien.ac.at/research/publications/2012/knecht_martin_2012_RSMR/", } @bachelorsthesis{spelitz_stefan-2012-CDTFMR, title = "Color Distribution Transfer For Mixed-Reality Applications", author = "Stefan Spelitz", year = "2012", abstract = "In mixed-reality environments it is essential to integrate virtual objects seamlessly into a real scene. Virtual objects should have similar appearances to those of real objects captured by a video camera. This is useful for many real-life application scenarios, including product advertising and visualization, edutainment systems or for enhancing cultural heritage sites. Typical problems in this domain are to match the current ‘color mood’ of the video camera scene with the colors of virtual (rendered) objects. The color mood depends on the global illumination conditions as well as the hue, saturation or white balance settings of the camera. 
The aim of this paper is to integrate existing methods of histogram transfers used in the domain of computational photography into mixed-reality environments. These methods allow us to simulate current luminance conditions in the scene and changes in the camera driver settings to apply them onto virtual objects. This thesis contains two fast-running approaches to provide a color mapping between virtual objects and the real scene, which can be used in real-time applications. The results show that these methods increase the immersion of virtual objects in a real scene.", month = oct, address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria", school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology ", keywords = "Augmented Reality, Color Transfer", URL = "https://www.cg.tuwien.ac.at/research/publications/2012/spelitz_stefan-2012-CDTFMR/", } @article{knecht_martin_2012_BRDFEstimation, title = "Interactive BRDF Estimation for Mixed-Reality Applications", author = "Martin Knecht and Georg Tanzmeister and Christoph Traxler and Michael Wimmer", year = "2012", abstract = "Recent methods in augmented reality allow simulating mutual light interactions between real and virtual objects. These methods are able to embed virtual objects in a more sophisticated way than previous methods. However, their main drawback is that they need a virtual representation of the real scene to be augmented in the form of geometry and material properties. In the past, this representation had to be modeled in advance, which is very time consuming and only allows for static scenes. We propose a method that reconstructs the surrounding environment and estimates its Bidirectional Reflectance Distribution Function (BRDF) properties at runtime without any preprocessing. By using the Microsoft Kinect sensor and an optimized hybrid CPU & GPU-based BRDF estimation method, we are able to achieve interactive frame rates. 
The proposed method was integrated into a differential instant radiosity rendering system to demonstrate its feasibility.", month = jun, journal = "Journal of WSCG", volume = "20", number = "1", issn = "1213-6972", pages = "47--56", keywords = "Augmented Reality, BRDF Estimation, Reconstruction", URL = "https://www.cg.tuwien.ac.at/research/publications/2012/knecht_martin_2012_BRDFEstimation/", } @inproceedings{KUE11, title = "BRDF approximation and estimation for Augmented Reality", author = "Patrick K{\"u}htreiber and Martin Knecht and Christoph Traxler", year = "2011", abstract = "In Augmented Reality applications it is important to have a good description of the surfaces of real objects if a consistent shading between real and virtual object is required. If such a description of a surface is not available it has to be estimated or approximated. In our paper we will present certain methods that deal with real-time bi-directional reflectance distribution function (BRDF) approximation in augmented reality. Of course an important thing to discuss is whether the applications we present all work in real-time and compute good (and real) looking results. There are different methods on how to achieve this goal. All of the methods we are going to present work via image based lighting and some require a 3D polygonal mesh representation of the object whose BRDF shall be approximated. 
Some methods estimate the BRDF parameters via error values and provide results at each iteration.", month = oct, organization = {``Gheorghe Asachi'' Technical University of Iasi, Faculty of Automatic Control and Computer Engineering}, location = "Sinaia, Romania", booktitle = "15th International Conference on System Theory, Control and Computing", pages = "318--324", keywords = "Mixed Reality, BRDF Estimation", URL = "https://www.cg.tuwien.ac.at/research/publications/2011/KUE11/", } @inproceedings{knecht-2011-CBCM, title = "Adaptive Camera-Based Color Mapping For Mixed-Reality Applications", author = "Martin Knecht and Christoph Traxler and Werner Purgathofer and Michael Wimmer", year = "2011", abstract = "We present a novel adaptive color mapping method for virtual objects in mixed-reality environments. In several mixed-reality applications, added virtual objects should be visually indistinguishable from real objects. Recent mixed-reality methods use global-illumination algorithms to approach this goal. However, simulating the light distribution is not enough for visually plausible images. Since the observing camera has its very own transfer function from real-world radiance values to RGB colors, virtual objects look artificial just because their rendered colors do not match with those of the camera. Our approach combines an on-line camera characterization method with a heuristic to map colors of virtual objects to colors as they would be seen by the observing camera. Previous tone-mapping functions were not designed for use in mixed-reality systems and thus did not take the camera-specific behavior into account. In contrast, our method takes the camera into account and thus can also handle changes of its parameters during runtime. 
The results show that virtual objects look visually more plausible than by just applying tone-mapping operators.", month = oct, isbn = "978-1-4577-2183-0 ", publisher = "IEEE/IET Electronic Library (IEL), IEEE-Wiley eBooks Library, VDE VERLAG Conference Proceedings", note = "E-ISBN: 978-1-4577-2184-7", location = "Basel, Switzerland", booktitle = "Proceedings of the 2011 IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2011)", pages = "165--168", keywords = "Color Matching, Differential Rendering, Mixed Reality, Tone Mapping", URL = "https://www.cg.tuwien.ac.at/research/publications/2011/knecht-2011-CBCM/", } @inproceedings{knecht_martin-2011-FPSPAR, title = "A Framework For Perceptual Studies In Photorealistic Augmented Reality", author = "Martin Knecht and Andreas D\"{u}nser and Christoph Traxler and Michael Wimmer and Raphael Grasset", year = "2011", abstract = "In photorealistic augmented reality virtual objects are integrated in the real world in a seamless visual manner. To obtain a perfect visual augmentation these objects must be rendered indistinguishable from real objects and should be perceived as such. In this paper we propose a research test bed framework to study the different unresolved perceptual issues in photorealistic augmented reality and its application to different disciplines. 
The framework computes a global illumination approximation in real-time and therefore leverages a new class of experimental research topics.", month = mar, location = "Singapore", editor = "Frank Steinicke, Pete Willemsen", booktitle = "Proceedings of the 3rd IEEE VR 2011 Workshop on Perceptual Illusions in Virtual Environments", pages = "27--32", keywords = "photorealistic augmented reality, real-time global illumination, human perception", URL = "https://www.cg.tuwien.ac.at/research/publications/2011/knecht_martin-2011-FPSPAR/", } @inproceedings{knecht_martin_2010_DIR, title = "Differential Instant Radiosity for Mixed Reality", author = "Martin Knecht and Christoph Traxler and Oliver Mattausch and Werner Purgathofer and Michael Wimmer", year = "2010", abstract = "In this paper we present a novel plausible realistic rendering method for mixed reality systems, which is useful for many real life application scenarios, like architecture, product visualization or edutainment. To allow virtual objects to seamlessly blend into the real environment, the real lighting conditions and the mutual illumination effects between real and virtual objects must be considered, while maintaining interactive frame rates (20-30fps). The most important such effects are indirect illumination and shadows cast between real and virtual objects. Our approach combines Instant Radiosity and Differential Rendering. In contrast to some previous solutions, we only need to render the scene once in order to find the mutual effects of virtual and real scenes. The dynamic real illumination is derived from the image stream of a fish-eye lens camera. We describe a new method to assign virtual point lights to multiple primary light sources, which can be real or virtual. We use imperfect shadow maps for calculating illumination from virtual point lights and have significantly improved their accuracy by taking the surface normal of a shadow caster into account. 
Temporal coherence is exploited to reduce flickering artifacts. Our results show that the presented method highly improves the illusion in mixed reality applications and significantly diminishes the artificial look of virtual objects superimposed onto real scenes.", month = oct, note = "Best Paper Award!", location = "Seoul", booktitle = "Proceedings of the 2010 IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2010)", pages = "99--107", keywords = "Instant Radiosity, Differential Rendering, Real-time Global Illumination, Mixed Reality", URL = "https://www.cg.tuwien.ac.at/research/publications/2010/knecht_martin_2010_DIR/", }