@article{SCHUETZ-2020-MPC,
  title = "Fast Out-of-Core Octree Generation for Massive Point Clouds",
  author = "Markus Sch\"{u}tz and Stefan Ohrhallinger and Michael Wimmer",
  year = "2020",
  abstract = "We propose an efficient out-of-core octree generation method for arbitrarily large point clouds. It utilizes a hierarchical counting sort to quickly split the point cloud into small chunks, which are then processed in parallel. Levels of detail are generated by subsampling the full data set bottom up using one of multiple exchangeable sampling strategies. We introduce a fast hierarchical approximate blue-noise strategy and compare it to a uniform random sampling strategy. The throughput, including out-of-core access to disk, generating the octree, and writing the final result to disk, is about an order of magnitude faster than the state of the art, and reaches up to around 6 million points per second for the blue-noise approach and up to around 9 million points per second for the uniform random approach on modern SSDs.",
  month = nov,
  journal = "Computer Graphics Forum",
  volume = "39",
  number = "7",
  issn = "1467-8659",
  doi = "10.1111/cgf.14134",
  publisher = "John Wiley \& Sons, Inc.",
  pages = "1--13",
  keywords = "point clouds, point-based rendering, level of detail",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2020/SCHUETZ-2020-MPC/",
}

@article{OTEPKA-2020-PPC,
  title = "Efficient Loading and Visualization of Massive Feature-Rich Point Clouds Without Hierarchical Acceleration Structures",
  author = "Johannes Otepka and Gottfried Mandlburger and Markus Sch\"{u}tz and Norbert Pfeifer and Michael Wimmer",
  year = "2020",
  abstract = "Nowadays, point clouds are the standard product when capturing reality, independent of scale and measurement technique. In particular, Dense Image Matching (DIM) and Laser Scanning (LS) are state-of-the-art capturing methods for a great variety of applications, producing detailed point clouds of up to billions of points. In-depth analysis of such huge point clouds typically requires sophisticated spatial indexing structures to support potentially long-lasting automated non-interactive processing tasks like feature extraction, semantic labelling, surface generation, and the like. Nevertheless, a visual inspection of the point data is often necessary to obtain an impression of the scene and to roughly check the completeness, quality, and outlier rates of the captured data in advance. Intermediate processing results, containing additional per-point computed attributes, may also require visual analysis to draw conclusions or to parameterize further processing. Over the last decades, a variety of commercial, free, and open-source viewers have been developed that can visualise huge point clouds and colorize them based on available attributes. However, they either have poor loading and navigation performance, visualize only a subset of the points, or require the creation of spatial indexing structures in advance. In this paper, we evaluate a progressive method that is capable of rendering any point cloud that fits in GPU memory in real time, without the need for time-consuming hierarchical acceleration structure generation. In combination with our multi-threaded LAS and LAZ loaders, we achieve a load performance of up to 20 million points per second, display points already while loading, support flexible switching between different attributes, and render up to one billion points with visually appealing navigation behaviour.
Furthermore, loading times of different data sets for different open-source and commercial software packages are analysed.",
  month = aug,
  journal = "ISPRS - International Archives of the Photogrammetry, Remote Sensing and Spatial Information Sciences",
  volume = "XLIII-B2-2020",
  issn = "1682-1750",
  doi = "10.5194/isprs-archives-XLIII-B2-2020-293-2020",
  pages = "293--300",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2020/OTEPKA-2020-PPC/",
}

@article{schuetz-2020-PPC,
  title = "Progressive Real-Time Rendering of One Billion Points Without Hierarchical Acceleration Structures",
  author = "Markus Sch\"{u}tz and Gottfried Mandlburger and Johannes Otepka and Michael Wimmer",
  year = "2020",
  abstract = "Research in rendering large point clouds traditionally focused on the generation and use of hierarchical acceleration structures that allow systems to load and render the smallest fraction of the data with the largest impact on the output. The generation of these structures is slow and time-consuming, however, and therefore ill-suited for tasks such as quickly looking at scan data stored in widely used unstructured file formats, or immediately displaying the results of point-cloud processing tasks. We propose a progressive method that is capable of rendering any point cloud that fits in GPU memory in real time, without the need to generate hierarchical acceleration structures in advance. Our method supports data sets with a large number of attributes per point, achieves a load performance of up to 100 million points per second, displays already loaded data in real time while the remaining data is still being loaded, and is capable of rendering up to one billion points using an on-the-fly generated shuffled vertex buffer as its data structure, instead of slow-to-generate hierarchical structures. Shuffling is done during loading in order to allow efficiently filling holes with random subsets, which leads to higher-quality convergence behavior.",
  month = may,
  journal = "Computer Graphics Forum",
  volume = "39",
  number = "2",
  issn = "1467-8659",
  doi = "10.1111/cgf.13911",
  booktitle = "EUROGRAPHICS",
  publisher = "John Wiley \& Sons Ltd.",
  pages = "51--64",
  keywords = "point-based rendering",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2020/schuetz-2020-PPC/",
}

@misc{SCHUETZ-2019-PCC,
  title = "Rendering Point Clouds with Compute Shaders",
  author = "Markus Sch\"{u}tz and Michael Wimmer",
  year = "2019",
  abstract = "We propose a compute-shader-based point cloud rasterizer with up to 10 times higher performance than classic point-based rendering with the GL_POINTS primitive. In addition, our rasterizer offers 5-byte depth-buffer precision with uniform or customizable distribution, and we show that it is possible to implement a high-quality splatting method that blends together overlapping fragments while still maintaining higher frame rates than the traditional approach.",
  month = nov,
  isbn = "978-1-4503-6943-5/19/11",
  event = "SIGGRAPH Asia",
  Conference date = "Poster presented at SIGGRAPH Asia (2019-11)",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2019/SCHUETZ-2019-PCC/",
}

@misc{schuetz-2019-LCO,
  title = "Live Coding of a VR Render Engine in VR",
  author = "Markus Sch\"{u}tz and Michael Wimmer",
  year = "2019",
  abstract = "Live coding in virtual reality allows users to create and modify their surroundings through code without the need to leave the virtual reality environment. Previous work focuses on modifying the scene.
We propose an application that allows developers to modify virtually everything at runtime, including not only the scene but also the render engine, shader code, and input handling, using standard desktop IDEs through a desktop mirror.",
  month = mar,
  publisher = "IEEE",
  location = "Osaka",
  address = "http://ieeevr.org/2019/",
  event = "IEEE VR 2019",
  doi = "10.1109/VR.2019.8797760",
  Conference date = "Poster presented at IEEE VR 2019 (2019-03)",
  pages = "1150--1151",
  keywords = "virtual reality, live coding, VR",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2019/schuetz-2019-LCO/",
}

@inproceedings{schuetz-2019-CLOD,
  title = "Real-Time Continuous Level of Detail Rendering of Point Clouds",
  author = "Markus Sch\"{u}tz and Katharina Kr\"{o}sl and Michael Wimmer",
  year = "2019",
  abstract = "Real-time rendering of large point clouds requires acceleration structures that reduce the number of points drawn on screen. State-of-the-art algorithms group and render points in hierarchically organized chunks with varying extent and density, which results in sudden changes of density from one level of detail to another, as well as noticeable popping artifacts when additional chunks are blended in or out. These popping artifacts are especially noticeable at lower levels of detail, and consequently in virtual reality, where high performance requirements impose a reduction in detail. We propose a continuous level-of-detail method that exhibits gradual rather than sudden changes in density. Our method continuously recreates a down-sampled vertex buffer from the full point cloud, based on camera orientation, position, and distance to the camera, in a point-wise rather than chunk-wise fashion and at speeds of up to 17 million points per millisecond. As a result, additional details are blended in or out in a less noticeable and significantly less irritating manner compared to the state of the art. The improved acceptance of our method was successfully evaluated in a user study.",
  month = mar,
  publisher = "IEEE",
  location = "Osaka, Japan",
  event = "IEEE VR 2019, the 26th IEEE Conference on Virtual Reality and 3D User Interfaces",
  doi = "10.1109/VR.2019.8798284",
  booktitle = "2019 IEEE Conference on Virtual Reality and 3D User Interfaces",
  pages = "103--110",
  keywords = "point clouds, virtual reality, VR",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2019/schuetz-2019-CLOD/",
}

@misc{schuetz-2018-PPC,
  title = "Progressive Real-Time Rendering of Unprocessed Point Clouds",
  author = "Markus Sch\"{u}tz and Michael Wimmer",
  year = "2018",
  abstract = "Rendering tens of millions of points in real time usually requires either high-end graphics cards or the use of spatial acceleration structures. We introduce a method to progressively display as many points as the GPU memory can hold in real time by reprojecting what was visible and randomly adding additional points to uniformly converge towards the full result within a few frames. Our method heavily limits the number of points that have to be rendered each frame, and it converges quickly and in a visually pleasing way, which makes it suitable even for notebooks with low-end GPUs. The data structure consists of a randomly shuffled array of points that is incrementally generated on the fly while points are being loaded.
Due to this, it can be used to directly view point clouds in common sequential formats such as LAS or LAZ while they are being loaded, and without the need to generate spatial acceleration structures in advance, as long as the data fits into GPU memory.",
  month = aug,
  publisher = "ACM",
  location = "Vancouver, Canada",
  isbn = "978-1-4503-5817-0/18/08",
  event = "ACM SIGGRAPH 2018",
  doi = "10.1145/3230744.3230816",
  Conference date = "Poster presented at ACM SIGGRAPH 2018 (2018-08-12--2018-08-16)",
  pages = "Article 41",
  keywords = "point-based rendering, point cloud, LIDAR",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2018/schuetz-2018-PPC/",
}

@xmascard{kroesl_x_card_2017,
  title = "X-Mas Card 2017",
  author = "Katharina Kr\"{o}sl and Markus Sch\"{u}tz",
  year = "2017",
  abstract = "This Christmas we want to illuminate your holidays with luminaires specifically designed for this occasion. The festive scene on this card features semi-translucent luminaires in the shape of Christmas trees. The light distribution of the corresponding luminaire model was simulated in LiteMaker, an interactive luminaire development tool that was developed at TU Wien and VRVis. LiteMaker provides interactive editing functionality and very fast, high-quality previews of the final, physically correct simulated light distribution of a luminaire model. The final scene and light distributions were rendered using our light-planning software HILITE. The festive scene on this card is illuminated by luminaires in the shape of Christmas trees. The lighting design of the scene was created with LiteMaker, an interactive development tool for luminaires developed at TU Wien and VRVis. Through interactive editing and very fast yet high-quality, physically correct preview images, LiteMaker enables faster design of lighting concepts. The finished scene was then rendered in our light simulation software HILITE.",
  month = dec,
  URL = "https://www.cg.tuwien.ac.at/research/publications/2017/kroesl_x_card_2017/",
}

@mastersthesis{SCHUETZ-2016-POT,
  title = "Potree: Rendering Large Point Clouds in Web Browsers",
  author = "Markus Sch\"{u}tz",
  year = "2016",
  abstract = "This thesis introduces Potree, a web-based renderer for large point clouds. It allows users to view data sets with billions of points, from sources such as LIDAR or photogrammetry, in real time in standard web browsers. One of the main advantages of point cloud visualization in web browsers is that it allows users to share their data sets with clients or the public without the need to install third-party applications and transfer huge amounts of data in advance. The focus on large point clouds, together with a variety of measuring tools, also allows users to use Potree to look at, analyze, and validate raw point cloud data without the need for a time-intensive and potentially costly meshing step. The streaming and rendering of billions of points in web browsers, without the need to load large amounts of data in advance, is achieved with a hierarchical structure that stores subsamples of the original data at different resolutions. A low resolution is stored in the root node, and with each level the resolution gradually increases. The structure allows Potree to cull regions of the point cloud that are outside the view frustum, and to render distant regions at a lower level of detail.
The result is an open-source point cloud viewer, which was able to render point cloud data sets of up to 597 billion points, roughly 1.6 terabytes after compression, in real time in a web browser.",
  month = sep,
  address = "Favoritenstrasse 9-11/E193-02, A-1040 Vienna, Austria",
  school = "Institute of Computer Graphics and Algorithms, Vienna University of Technology",
  keywords = "point cloud rendering, WebGL, LIDAR",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2016/SCHUETZ-2016-POT/",
}

@inproceedings{SCHUETZ-2015-HQP,
  title = "High-Quality Point Based Rendering Using Fast Single Pass Interpolation",
  author = "Markus Sch\"{u}tz and Michael Wimmer",
  year = "2015",
  abstract = "We present a method to improve the visual quality of point cloud renderings through a nearest-neighbor-like interpolation of points. This allows applications to render points at larger sizes in order to reduce holes, without reducing the readability of fine details due to occluding points. The implementation requires only a few modifications to existing shaders, making it suitable for integration into software applications without major design changes.",
  month = sep,
  location = "Granada, Spain",
  booktitle = "Proceedings of Digital Heritage 2015 Short Papers",
  pages = "369--372",
  keywords = "point-based rendering",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2015/SCHUETZ-2015-HQP/",
}

@studentproject{schuetz_markus-2013-pra,
  title = "Real-time Consistent Meshing",
  author = "Markus Sch\"{u}tz",
  year = "2013",
  abstract = "Create a mesh for a point cloud by constructing near-consistent umbrellas at points. Near-consistency is ensured by determining the umbrella with lexically minimum edges. The first goal is to find this umbrella with very few evaluations and, if necessary, with high probability. The second task is to implement the entire process on the GPU and to display the consistent umbrellas.",
  keywords = "nearest neighbors, meshing, real-time, delaunay triangulation",
  URL = "https://www.cg.tuwien.ac.at/research/publications/2013/schuetz_markus-2013-pra/",
}