# This BibTeX File has been generated by
# the Typo3 extension 'Sixpack-4-T3 by Sixten Boeck'
#
# URL:
# Date: 10/23/2016
#
# Non-standard BibTeX fields are included.
# state: 0 = published, 1 = accepted, 2 = submitted, 3 = to be published // if missing, published is assumed
# extern, deleted, hidden: 0 = false, 1 = true // if missing, false is assumed
# link format: Title Url // separated by a whitespace

@article{PSTD2014,
  author = { Pasewaldt, Sebastian and Semmo, Amir and Trapp, Matthias and D{\"o}llner, J{\"u}rgen },
  title = { Multi-Perspective 3D Panoramas },
  journal = { International Journal of Geographical Information Science (IJGIS) },
  year = { 2014 },
  volume = { 28 },
  number = { 10 },
  pages = { 2030--2051 },
  abstract = {
This article presents multi-perspective 3D panoramas that focus on visualizing 3D geovirtual environments (3D GeoVEs) for navigation and exploration tasks. Their key element, a multi-perspective view, seamlessly combines what is seen from multiple viewpoints into a single image. This approach facilitates the presentation of information for virtual 3D city and landscape models, particularly by reducing occlusions, increasing screen-space utilization, and providing additional context within a single image. We complement multi-perspective views with cartographic visualization techniques to stylize features according to their semantics and highlight important or prioritized information. When combined, both techniques constitute the core implementation of interactive, multi-perspective 3D panoramas. They offer a large number of effective means for visual communication of 3D spatial information, a high degree of customization with respect to cartographic design, and manifold applications in different domains. We discuss design decisions of 3D panoramas for the exploration of and navigation in 3D GeoVEs. We also discuss a preliminary user study that indicates that 3D panoramas are a promising approach for navigation systems using 3D GeoVEs.
  },
  keywords = { multi-perspective visualization, panorama, focus+context visualization, 3D geovirtual environments, cartographic design },
  project = { HPI;NFGII },
  doi = { 10.1080/13658816.2014.922686 },
  link1 = { http://dx.doi.org/10.1080/13658816.2014.922686 },
  sorting = { 1792 }
}

@article{PTD11,
  author = { Pasewaldt, Sebastian and Trapp, Matthias and D{\"o}llner, J{\"u}rgen },
  title = { Multiscale Visualization of 3D Geovirtual Environments Using View-Dependent Multi-Perspective Views },
  journal = { Journal of WSCG },
  year = { 2011 },
  volume = { 19 },
  number = { 3 },
  pages = { 111--118 },
  month = { 2 },
  abstract = {
3D geovirtual environments (GeoVEs), such as virtual 3D city models or landscape models, are essential visualization tools for effectively communicating complex spatial information. In this paper, we discuss how these environments can be visualized using multi-perspective projections based on view-dependent global deformations. Multi-perspective projections enable 3D visualization similar to panoramic maps, increasing overview and information density in depictions of 3D GeoVEs. To make multi-perspective views an effective medium, they must adjust to the orientation of the virtual camera, which is controlled by the user and constrained by the environment. Changing multi-perspective camera configurations therefore typically requires the user to manually adapt the global deformation, which is an error-prone, non-intuitive, and often time-consuming task. Our main contribution is a concept for the automatic and view-dependent interpolation of different global deformation preset configurations. Applications and systems that implement such view-dependent global deformations allow users to smoothly and steadily interact with and navigate through multi-perspective 3D GeoVEs.
  },
  keywords = { multi-perspective views, view-dependence, global space deformation, real-time rendering, virtual 3D environments, geovisualization },
  editor = { Václav Skala },
  publisher = { UNION Agency – Science Press },
  project = { NFG;HPI },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2011/PTD11/PTD11.pdf },
  isbn = { 978-80-86943-84-8 },
  issn = { 1213-6072 },
  link2 = { Video [YouTube] http://www.youtube.com/watch?v=gzZXTXBwccY },
  sorting = { 2816 }
}

@inproceedings{SDTKDP2016,
  author = { Semmo, Amir and D{\"u}rschmid, Tobias and Trapp, Matthias and Klingbeil, Mandy and D{\"o}llner, J{\"u}rgen and Pasewaldt, Sebastian },
  title = { Interactive Image Filtering with Multiple Levels-of-Control on Mobile Devices },
  year = { 2016 },
  month = { 12 },
  abstract = {
With the continuous development of mobile graphics hardware, interactive high-quality image stylization based on nonlinear filtering is becoming feasible and increasingly used in casual creativity apps. However, these apps often only offer high-level controls to parameterize image filters and generally lack support for low-level (artistic) control, thus automating art creation rather than assisting it. This work presents a GPU-based framework that enables image filters to be parameterized at three levels of control: (1) presets followed by (2) global parameter adjustments can be interactively refined by (3) complementary on-screen painting that operates within the filters' parameter spaces for local adjustments. The framework provides a modular XML-based effect scheme to effectively build complex image processing chains, using these interactive filters as building blocks, that can be efficiently processed on mobile devices. Thereby, global and local parameterizations are directed with higher-level algorithmic support to ease the interactive editing process, which is demonstrated by state-of-the-art stylization effects, such as oil paint filtering and watercolor rendering.
  },
  booktitle = { Proceedings ACM SIGGRAPH Asia Symposium on Mobile Graphics and Interactive Applications },
  project = { NFGII },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2016/SDTKDP2016/asemmo-mgia2016-authors-version.pdf },
  doi = { 10.1145/2999508.2999521 },
  sorting = { 1 },
  state = { 1 }
}

@inproceedings{PSDS2016,
  author = { Pasewaldt, Sebastian and Semmo, Amir and D{\"o}llner, J{\"u}rgen and Schlegel, Frank },
  title = { BeCasso: Artistic Image Processing and Editing on Mobile Devices },
  year = { 2016 },
  month = { 12 },
  abstract = {
BeCasso is a mobile app that enables users to transform photos into high-quality, high-resolution non-photorealistic renditions, such as oil and watercolor paintings, cartoons, and colored pencil drawings, which are inspired by real-world paintings or drawing techniques. In contrast to neural network and physically-based approaches, the app employs state-of-the-art nonlinear image filtering. For example, oil paint and cartoon effects are based on smoothed structure information to interactively synthesize renderings with soft color transitions. BeCasso empowers users to easily create aesthetic renderings by implementing a two-fold strategy: First, it provides parameter presets that may serve as a starting point for a custom stylization based on global parameter adjustments. Thereby, users can obtain initial renditions that may be fine-tuned afterwards. Second, it enables local style adjustments: using on-screen painting metaphors, users are able to locally adjust different stylization features, e.g., to vary the level of abstraction, pen, brush, and stroke direction, or the contour lines. In this way, the app provides tools for both higher-level interaction and low-level control [Isenberg 2016] to serve the different needs of non-experts and digital artists.

Isenberg, T. 2016. Interactive NPAR: What Type of Tools Should We Create? In Proc. NPAR, The Eurographics Association, Goslar, Germany, 89–96.
  },
  affiliation = { Hasso-Plattner-Institut, University of Potsdam, Germany },
  booktitle = { Proceedings ACM SIGGRAPH Asia Symposium on Mobile Graphics and Interactive Applications (Demo) },
  project = { NFGII },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2016/PSDS2016/mgia-demo2016_authors_version.pdf },
  doi = { 10.1145/2999508.2999518 },
  state = { 1 }
}

@inproceedings{STDDP2016,
  author = { Semmo, Amir and Trapp, Matthias and D{\"u}rschmid, Tobias and D{\"o}llner, J{\"u}rgen and Pasewaldt, Sebastian },
  title = { Interactive Multi-scale Oil Paint Filtering on Mobile Devices },
  year = { 2016 },
  abstract = {
This work presents an interactive mobile implementation of a filter that transforms images into an oil paint look. To this end, a multi-scale approach is introduced that processes image pyramids and uses flow-based joint bilateral upsampling to achieve deliberate levels of abstraction at multiple scales and interactive frame rates. The approach facilitates the implementation of interactive tools that adjust the appearance of filtering effects at run-time, which is demonstrated by an on-screen painting interface for per-pixel parameterization that fosters the casual creativity of non-artists.
  },
  booktitle = { Proceedings ACM SIGGRAPH Posters },
  project = { NFGII },
  doi = { 10.1145/2945078.2945120 },
  sorting = { 8 }
}

@inproceedings{STPD2016,
  author = { Semmo, Amir and Trapp, Matthias and Pasewaldt, Sebastian and D{\"o}llner, J{\"u}rgen },
  title = { Interactive Oil Paint Filtering On Mobile Devices },
  year = { 2016 },
  abstract = {
Image stylization enjoys a growing popularity on mobile devices to foster casual creativity. However, the implementation and provision of high-quality image filters for artistic rendering still face the inherent limitations of mobile graphics hardware, such as computing power and memory resources. This work presents a mobile implementation of a filter that transforms images into an oil paint look, thereby highlighting concepts and techniques for performing multi-stage nonlinear image filtering on mobile devices. The proposed implementation is based on OpenGL ES and the OpenGL ES Shading Language, and supports on-screen painting to interactively adjust the appearance in local image regions, e.g., to vary the level of abstraction, brush, and stroke direction. Evaluations of the implementation indicate interactive performance and results of similar aesthetic quality to the original desktop variant.
  },
  booktitle = { Expressive Poster Session },
  project = { NFGII },
  sorting = { 4 }
}

@inproceedings{PTD2013,
  author = { Pasewaldt, Sebastian and Trapp, Matthias and D{\"o}llner, J{\"u}rgen },
  title = { Multi-Perspective Detail+Overview Visualization for 3D Building Exploration },
  year = { 2013 },
  pages = { 57--64 },
  month = { 9 },
  abstract = {
This paper presents a multi-perspective rendering technique that enables detail+overview visualization and interactive exploration of virtual 3D building models. Virtual 3D building models, as main elements of virtual 3D city models, are used in a growing number of application domains, such as geoanalysis, disaster management, and architectural planning. Visualization systems for such building models often rely on perspective or orthogonal projections using a single viewpoint. Therefore, the exploration of a complete model requires a user to change the viewpoint multiple times and to memorize the content of each view to obtain a comprehensive mental model. Since this is usually a time-consuming task, which implies context switching, current visualization systems use multiple viewports to simultaneously depict an object from different perspectives. Our approach extends the idea of multiple viewports by combining two linked views for the interactive exploration of virtual 3D building models and their facades. In contrast to traditional approaches, we automatically generate a multi-perspective view that simultaneously depicts all facades of the building in one overview image. This facilitates the process of obtaining overviews and supports fast and direct navigation to various points of interest. We describe the concept and implementation of our Multiple-Center-of-Projection camera model for real-time multi-perspective image synthesis. Further, we provide insights into different interaction techniques for linked multi-perspective views and outline directions for future work.
  },
  affiliation = { Hasso-Plattner-Institut, University of Potsdam },
  editor = { Silvester Czanner, Wen Tang },
  publisher = { The Eurographics Association },
  booktitle = { Proceedings of the 11th Theory and Practice of Computer Graphics 2013 Conference (TP.CG.2013) },
  project = { HPI; NFGII },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2013/PTD2013/PTD2013.pdf },
  isbn = { 978-3-905673-98-2 },
  link2 = { Video (YouTube) http://www.youtube.com/watch?v=Ywo4gpx0rE8&feature=share&list=UURf7yK_n8IfSBtpWh8uP0mA },
  sorting = { 512 }
}

@inproceedings{Pasewaldt2012a,
  author = { Pasewaldt, Sebastian and Semmo, Amir and Trapp, Matthias and D{\"o}llner, J{\"u}rgen },
  title = { Towards Comprehensible Digital 3D Maps },
  year = { 2012 },
  pages = { 261--276 },
  month = { 11 },
  abstract = {
Digital mapping services have become fundamental tools in economy and society to provide domain experts and non-experts with customized, multi-layered map contents. In particular because of the continuous advancements in the acquisition, provision, and visualization of virtual 3D city and landscape models, 3D mapping services today represent key components of a growing number of applications, such as car navigation, education, or disaster management. However, current systems and applications providing digital 3D maps face drawbacks and limitations, such as occlusion, visual clutter, or insufficient use of screen space, that impede an effective comprehension of geoinformation.
To this end, cartographers and computer graphics engineers have developed design guidelines as well as rendering and visualization techniques that aim to increase the effectiveness and expressiveness of digital 3D maps, but whose seamless combination has yet to be achieved. This work discusses potentials of digital 3D maps that are based on combining cartography-oriented rendering techniques and multi-perspective views. For this purpose, a classification of cartographic design principles and visualization techniques, as well as suitable combinations thereof, is identified that aids the comprehension of digital 3D maps. According to this classification, a prototypical implementation demonstrates the benefits of multi-perspective and non-photorealistic rendering techniques for the visualization of 3D map contents. In particular, it enables (1) a seamless combination of cartography-oriented and photorealistic graphic styles, while (2) increasing screen-space utilization and (3) simultaneously directing a viewer’s gaze to important or prioritized information.
  },
  editor = { Markus Jobst },
  publisher = { Jobstmedia Management Verlag, Wien },
  chapter = { 4 },
  booktitle = { Service-Oriented Mapping 2012 (SOMAP2012) },
  organization = { International Cartographic Association },
  project = { NFGII;HPI },
  language = { English },
  isbn = { 3-9502039-2-3 },
  link1 = { Slides http://www.hpi.uni-potsdam.de/fileadmin/user_upload/fachgebiete/doellner/publications/2012/PSTD2012/somap2012_pasewaldt_towards_comprehensible_3D_maps.pdf },
  link2 = { Paper http://www.hpi.de/fileadmin/user_upload/fachgebiete/doellner/publications/2012/PSTD2012/PSTD_2012_SOMAP.pdf },
  sorting = { 32 }
}

@inproceedings{EPTD12,
  author = { Engel, Juri and Pasewaldt, Sebastian and Trapp, Matthias and D{\"o}llner, J{\"u}rgen },
  title = { An Immersive Visualization System for Virtual 3D City Models },
  year = { 2012 },
  month = { 6 },
  abstract = {
Virtual 3D city models are essential visualization tools for the effective communication of complex urban spatial information. Immersive visualization of virtual 3D city models offers intuitive access to and an effective realization of urban spatial information, enabling new collaborative applications and decision-support systems. This paper discusses techniques for and the usage of fully immersive environments for visualizing virtual 3D city models with advanced 3D rendering techniques. Fully immersive environments imply a number of specific requirements for both hardware and software, which are discussed in detail. Further, we identify and outline conceptual and technical challenges as well as possible solution approaches by means of visualization system prototypes for large-scale, fully immersive environments. We evaluate the presented concepts using two application examples and discuss the results.
  },
  affiliation = { Hasso-Plattner-Institut, University of Potsdam, Germany },
  publisher = { IEEE GRSS },
  booktitle = { 20th International Conference on Geoinformatics (GEOINFORMATICS), 2012 },
  project = { NFGII;HPI },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2012/EPTD12/EPTD12_draft.pdf },
  sorting = { 3840 }
}

@inproceedings{TBPD10,
  author = { Trapp, Matthias and Beesk, Christian and Pasewaldt, Sebastian and D{\"o}llner, J{\"u}rgen },
  title = { Interactive Rendering Techniques for Highlighting in 3D Geovirtual Environments },
  year = { 2010 },
  month = { 11 },
  abstract = {
3D geovirtual environments (GeoVEs), such as virtual 3D city and landscape models, have become important tools for the visualization of geospatial information.
Highlighting is an important component within a visualization framework and is essential for user interaction in many applications. It enables the user to easily perceive active or selected objects in the context of the current interaction task. With respect to 3D GeoVEs, it has a number of applications, such as the visualization of user selections and database queries, as well as navigation aids that highlight waypoints or routes to guide the user's attention. The geometrical complexity of 3D GeoVEs often requires specialized rendering techniques for real-time image synthesis. This paper presents a framework that unifies various highlighting techniques and is especially suitable for the interactive rendering of 3D GeoVEs of high geometrical complexity.
  },
  affiliation = { Hasso-Plattner-Institut, University of Potsdam, Germany },
  url = { fileadmin/user_upload/fachgebiete/doellner/publications/2010/TBPD10/Highlighting.pdf },
  publisher = { Springer },
  series = { Lecture Notes in Geoinformation \& Cartography },
  booktitle = { Proceedings of the 5th 3D GeoInfo Conference },
  project = { NFG;HPI },
  files = { fileadmin/user_upload/fachgebiete/doellner/publications/2010/TBPD10/Highlighting.pdf },
  link2 = { Slides (AuthorStream) http://www.authorstream.com/Presentation/autopilot-629065-interactive-rendering-techniques-for-highlighting/ },
  sorting = { 1280 }
}