2022
Bujack, Roxana; Bresciani, Etienne; Waters, Jiajia; Schroeder, Will
Topological Segmentation of 2D Vector Fields Journal Article
In: 2022, (LEVIA'22. Leipzig, 06.04.2022 - 07.04.2022).
Abstract | Links | BibTeX | Tags: segmentation, Topology, Vector field
@article{bujack2022topological,
  title     = {Topological Segmentation of {2D} Vector Fields},
  author    = {Roxana Bujack and Etienne Bresciani and Jiajia Waters and Will Schroeder},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2022Levia.pdf},
  doi       = {10.36730/2022.1.levia.5},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  abstract  = {Vector field topology has a long tradition as a visualization tool. The separatrices segment the domain visually into canonical regions in which all streamlines behave qualitatively the same. But application scientists often need more than just a nice image for their data analysis, and, to best of our knowledge, so far no workflow has been proposed to extract the critical points, the associated separatrices, and then provide the induced segmentation on the data level. We present a workflow that computes the segmentation of the domain of a 2D vector field based on its separatrices. We show how it can be used for the extraction of quantitative information about each segment in two applications: groundwater flow and heat exchange.},
  note      = {LEVIA'22. Leipzig, 06.04.2022 - 07.04.2022},
  keywords  = {segmentation, Topology, Vector field},
  pubstate  = {published},
  tppubtype = {article}
}
Bujack, Roxana; Zhang, Xinhua; Suk, Tomáš; Rogers, David
Systematic generation of moment invariant bases for 2D and 3D tensor fields Journal Article
In: Pattern Recognition, vol. 123, pp. 108313, 2022, ISSN: 0031-3203.
Abstract | Links | BibTeX | Tags: Basis, Flexible, Generator approach, moment invariants, pattern detection, Rotation invariant, Tensor, Vector
@article{BUJACK2022108313,
  title     = {Systematic generation of moment invariant bases for {2D} and {3D} tensor fields},
  author    = {Roxana Bujack and Xinhua Zhang and Tomáš Suk and David Rogers},
  url       = {https://www.sciencedirect.com/science/article/pii/S0031320321004933},
  doi       = {10.1016/j.patcog.2021.108313},
  issn      = {0031-3203},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  journal   = {Pattern Recognition},
  volume    = {123},
  pages     = {108313},
  abstract  = {Moment invariants have been successfully applied to pattern detection tasks in 2D and 3D scalar, vector, and matrix valued data. However so far no flexible basis of invariants exists, i.e., no set that is optimal in the sense that it is complete and independent for every input pattern. In this paper, we prove that a basis of moment invariants can be generated that consists of tensor contractions of not more than two different moment tensors each under the conjecture of the set of all possible tensor contractions to be complete. This result allows us to derive the first generator algorithm that produces flexible bases of moment invariants with respect to orthogonal transformations by selecting a single non-zero moment to pair with all others in these two-factor products. Since at least one non-zero moment can be found in every non-zero pattern, this approach always generates a complete set of descriptors.},
  keywords  = {Basis, Flexible, Generator approach, moment invariants, pattern detection, Rotation invariant, Tensor, Vector},
  pubstate  = {published},
  tppubtype = {article}
}
Bujack, Roxana; Teti, Emily; Miller, Jonah; Caffrey, Elektra; Turton, Terece
The non-Riemannian nature of perceptual color space Journal Article
In: Proceedings of the National Academy of Sciences, vol. 119, no. 18, pp. e2119753119, 2022.
Abstract | Links | BibTeX | Tags: color perception, color theory
@article{doi:10.1073/pnas.2119753119,
  title     = {The non-{Riemannian} nature of perceptual color space},
  author    = {Roxana Bujack and Emily Teti and Jonah Miller and Elektra Caffrey and Terece Turton},
  url       = {https://www.pnas.org/doi/abs/10.1073/pnas.2119753119},
  doi       = {10.1073/pnas.2119753119},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  journal   = {Proceedings of the National Academy of Sciences},
  volume    = {119},
  number    = {18},
  pages     = {e2119753119},
  abstract  = {The scientific community generally agrees on the theory, introduced by Riemann and furthered by Helmholtz and Schrödinger, that perceived color space is not Euclidean but rather, a three-dimensional Riemannian space. We show that the principle of diminishing returns applies to human color perception. This means that large color differences cannot be derived by adding a series of small steps, and therefore, perceptual color space cannot be described by a Riemannian geometry. This finding is inconsistent with the current approaches to modeling perceptual color space. Therefore, the assumed shape of color space requires a paradigm shift. Consequences of this apply to color metrics that are currently used in image and video processing, color mapping, and the paint and textile industries. These metrics are valid only for small differences. Rethinking them outside of a Riemannian setting could provide a path to extending them to large differences. This finding further hints at the existence of a second-order Weber–Fechner law describing perceived differences.},
  keywords  = {color perception, color theory},
  pubstate  = {published},
  tppubtype = {article}
}
Teti, Emily; Turton, Terece; Miller, Jonah; Bujack, Roxana
Maximum likelihood estimation of difference scaling functions for suprathreshold judgments Journal Article
In: Journal of Vision, vol. 22, no. 10, pp. 9-9, 2022, ISSN: 1534-7362.
Abstract | Links | BibTeX | Tags:
@article{10.1167/jov.22.10.9,
  title     = {Maximum likelihood estimation of difference scaling functions for suprathreshold judgments},
  author    = {Emily Teti and Terece Turton and Jonah Miller and Roxana Bujack},
  url       = {https://jov.arvojournals.org/article.aspx?articleid=2783632},
  doi       = {10.1167/jov.22.10.9},
  issn      = {1534-7362},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  journal   = {Journal of Vision},
  volume    = {22},
  number    = {10},
  pages     = {9--9},
  abstract  = {Maximum likelihood estimation (MLE) has been used to produce perceptual scales from binary judgments of triads and quadruples. This method relies on Thurstone’s theory of a stochastic perceptual process where the perceived difference of two stimuli is the difference in their perceived strengths. It is possible that the perception of a suprathreshold difference is overestimated when adding smaller differences, a phenomenon referred to as diminishing returns. The current approach to construct a perceptual scale using MLE does not account for this phenomenon. We present a way to model the perception of differences using MLE and Thurstone’s theory, adapted to allow the possibility of diminishing returns. This method is validated using Monte Carlo simulated responses to experimental triads and can correctly model diminishing returns, the absence of diminishing returns, and the opposite of diminishing returns both in the cases when a perceptual scale is known and when the true perceived strengths of the stimuli are unknown. Additionally, this method was applied to empirical data sets to determine its feasibility in investigations of perception. Ultimately, it was found that this analysis allows for more accurate modeling of suprathreshold difference judgments, a more complete understanding of the perceptual processes underlying comparisons, and the evaluation of Thurstone’s theory of difference judgments.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Bujack, Roxana
Discussion and Visualization of Distinguished Hyperbolic Trajectories as a Generalization of Critical Points to 2D Time-dependent Flow Proceedings Article
In: 2022 Topological Data Analysis and Visualization (TopoInVis), pp. 59-69, 2022.
Abstract | Links | BibTeX | Tags: flow, Topology, Vector field, visualization
@inproceedings{9975815,
  title     = {Discussion and Visualization of Distinguished Hyperbolic Trajectories as a Generalization of Critical Points to {2D} Time-dependent Flow},
  author    = {Roxana Bujack},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2022topoInVis.pdf},
  doi       = {10.1109/TopoInVis57755.2022.00013},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  booktitle = {2022 Topological Data Analysis and Visualization (TopoInVis)},
  pages     = {59--69},
  abstract  = {Classical vector field topology has proven to be a useful visualization technique for steady flow, but its straightforward application to time-dependent flows lacks physical meaning. Necessary requirements for physical meaningfulness include the results to be objective, i.e., independent of the frame of reference of the observer, and Lagrangian, i.e., that the generalized critical points are trajectories. We analyze whether the theoretical concept of distinguished hyperbolic trajectories provides a physically meaningful generalization to classical critical points and if the existing extraction algorithms correctly compute what has been defined mathematically. We show that both theory and algorithms constitute a significant improvement over previous methods. We further present a method to visualize a time-dependent flow field in the reference frames of distinguished trajectories. The result is easy to interpret because it makes these trajectories look like classical critical points for each instance in time, but it is meaningful because it is Lagrangian and objective.},
  keywords  = {flow, Topology, Vector field, visualization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2021
Kinner, Eric Georg; Lukasczyk, Jonas; Rogers, David; Maciejewski, Ross; Garth, Christoph
Interpolation of Scientific Image Databases Proceedings Article
In: Garth, Christoph; Aurich, Jan C.; Linke, Barbara; Müller, Ralf; Ravani, Bahram; Weber, Gunther H.; Kirsch, Benjamin (Ed.): 2nd International Conference of the DFG International Research Training Group 2057 – Physical Modeling for Virtual Manufacturing (iPMVM 2020), pp. 19:1–19:17, Schloss Dagstuhl -- Leibniz-Zentrum für Informatik, Dagstuhl, Germany, 2021, ISSN: 2190-6807.
Links | BibTeX | Tags: cinema database, image database, image interpolation
@inproceedings{kinner_et_al:OASIcs.iPMVM.2020.19,
  title     = {Interpolation of Scientific Image Databases},
  author    = {Eric Georg Kinner and Jonas Lukasczyk and David Rogers and Ross Maciejewski and Christoph Garth},
  editor    = {Christoph Garth and Jan C. Aurich and Barbara Linke and Ralf Müller and Bahram Ravani and Gunther H. Weber and Benjamin Kirsch},
  url       = {https://drops.dagstuhl.de/opus/volltexte/2021/13768},
  doi       = {10.4230/OASIcs.iPMVM.2020.19},
  issn      = {2190-6807},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {2nd International Conference of the DFG International Research Training Group 2057 – Physical Modeling for Virtual Manufacturing (iPMVM 2020)},
  volume    = {89},
  pages     = {19:1--19:17},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum für Informatik},
  address   = {Dagstuhl, Germany},
  series    = {Open Access Series in Informatics (OASIcs)},
  keywords  = {cinema database, image database, image interpolation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Nardini, Pascal; Chen, Min; Bujack, Roxana; Bottinger, Michael; Scheuermann, Gerik
A Testing Environment for Continuous Colormaps Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 27, no. 2, pp. 1043-1053, 2021.
Abstract | Links | BibTeX | Tags: color perception, scalar analysis, Testing Environment
@article{9216559,
  title     = {A Testing Environment for Continuous Colormaps},
  author    = {Pascal Nardini and Min Chen and Roxana Bujack and Michael Böttinger and Gerik Scheuermann},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2020Vis.pdf},
  doi       = {10.1109/TVCG.2020.3028955},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {IEEE Transactions on Visualization and Computer Graphics},
  volume    = {27},
  number    = {2},
  pages     = {1043--1053},
  abstract  = {Many computer science disciplines (e.g., combinatorial optimization, natural language processing, and information retrieval) use standard or established test suites for evaluating algorithms. In visualization, similar approaches have been adopted in some areas (e.g., volume visualization), while user testimonies and empirical studies have been the dominant means of evaluation in most other areas, such as designing colormaps. In this paper, we propose to establish a test suite for evaluating the design of colormaps. With such a suite, the users can observe the effects when different continuous colormaps are applied to planar scalar fields that may exhibit various characteristic features, such as jumps, local extrema, ridge or valley lines, different distributions of scalar values, different gradients, different signal frequencies, different levels of noise, and so on. The suite also includes an expansible collection of real-world data sets including the most popular data for colormap testing in the visualization literature. The test suite has been integrated into a web-based application for creating continuous colormaps (https://ccctool.com/), facilitating close inter-operation between design and evaluation processes. This new facility complements traditional evaluation methods such as user testimonies and empirical studies.},
  keywords  = {color perception, scalar analysis, Testing Environment},
  pubstate  = {published},
  tppubtype = {article}
}
Bujack, Roxana; Tsai, Karen; Morley, Steven; Bresciani, Etienne
Open source vector field topology Journal Article
In: SoftwareX, vol. 15, pp. 100787, 2021, ISSN: 2352-7110.
Abstract | Links | BibTeX | Tags: Critical point, Separatrix, Topology, Vector field
@article{BUJACK2021100787,
  title     = {Open source vector field topology},
  author    = {Roxana Bujack and Karen Tsai and Steven Morley and Etienne Bresciani},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2021SoftwareX.pdf},
  doi       = {10.1016/j.softx.2021.100787},
  issn      = {2352-7110},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {SoftwareX},
  volume    = {15},
  pages     = {100787},
  abstract  = {A myriad of physical phenomena, such as fluid flows, magnetic fields, and population dynamics are described by vector fields. More often than not, vector fields are complex and their analysis is challenging. Vector field topology is a powerful analysis technique that consists in identifying the most essential structure of a vector field. Its topological features include critical points and separatrices, which segment the domain into regions of coherent flow behavior, provide a sparse and semantically meaningful representation of the underlying data. However, a broad adoption of this formidable technique has been hampered by the lack of open source software implementing it. The Visualization Toolkit (VTK) now contains the filter vtkVectorFieldTopology that extracts the topological skeleton of 2D and 3D vector fields. This paper describes our implementation and demonstrates its broad applicability with two real-world examples from hydrology and space physics.},
  keywords  = {Critical point, Separatrix, Topology, Vector field},
  pubstate  = {published},
  tppubtype = {article}
}
Nardini, Pascal; Chen, Min; Böttinger, Michael; Scheuermann, Gerik; Bujack, Roxana
Automatic Improvement of Continuous Colormaps in Euclidean Colorspaces Journal Article
In: Computer Graphics Forum, vol. 40, no. 3, pp. 361-373, 2021.
Abstract | Links | BibTeX | Tags:
@article{https://doi.org/10.1111/cgf.14313,
  title     = {Automatic Improvement of Continuous Colormaps in {Euclidean} Colorspaces},
  author    = {Pascal Nardini and Min Chen and Michael Böttinger and Gerik Scheuermann and Roxana Bujack},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2021EuroVis.pdf},
  doi       = {10.1111/cgf.14313},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {Computer Graphics Forum},
  volume    = {40},
  number    = {3},
  pages     = {361--373},
  abstract  = {Colormapping is one of the simplest and most widely used data visualization methods within and outside the visualization community. Uniformity, order, discriminative power, and smoothness of continuous colormaps are the most important criteria for evaluating and potentially improving colormaps. We present a local and a global automatic optimization algorithm in Euclidean color spaces for each of these design rules in this work. As a foundation for our optimization algorithms, we used the CCC-Tool colormap specification (CMS); each algorithm has been implemented in this tool. In addition to synthetic examples that demonstrate each method's effect, we show the outcome of some of the methods applied to a typhoon simulation.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2020
Childs, Hank; Ahern, Sean; Ahrens, James; Bauer, Andrew; Bennett, Janine; Bethel, Wes; Bremer, Peer-Timo; Brugger, Eric; Cottam, Joseph; Dorier, Matthieu; Dutta, Soumya; Favre, Jean; Fogal, Thomas; Frey, Steffen; Garth, Christoph; Geveci, Berk; Godoy, William; Hansen, Charles; Harrison, Cyrus; Hentschel, Bernd; Insley, Joseph; Johnson, Chris; Klasky, Scott; Knoll, Aaron; Kress, James; Larsen, Matthew; Lofstead, Jay; Ma, Kwan-Liu; Malakar, Preeti; Meredith, Jeremy; Moreland, Kenneth; Navrátil, Paul; O’Leary, Patrick; Parashar, Manish; Pascucci, Valerio; Patchett, John; Peterka, Tom; Petruzza, Steve; Podhorszki, Norbert; Pugmire, David; Rasquin, Michel; Rizzi, Silvio; Rogers, David; Sane, Sudhanshu; Sauer, Franz; Sisneros, Robert; Shen, Han-Wei; Usher, Will; Vickery, Rhonda; Vishwanath, Venkatram; Wald, Ingo; Wang, Ruonan; Weber, Gunther; Whitlock, Brad; Wolf, Matthew; Yu, Hongfeng; Ziegeler, Sean
A terminology for in situ visualization and analysis systems Journal Article
In: The International Journal of High Performance Computing Applications, vol. 34, no. 6, pp. 676-691, 2020.
Abstract | Links | BibTeX | Tags: in situ processing, scientific visualization
@article{doi:10.1177/1094342020935991,
  title     = {A terminology for in situ visualization and analysis systems},
  author    = {Hank Childs and Sean Ahern and James Ahrens and Andrew Bauer and Janine Bennett and Wes Bethel and Peer-Timo Bremer and Eric Brugger and Joseph Cottam and Matthieu Dorier and Soumya Dutta and Jean Favre and Thomas Fogal and Steffen Frey and Christoph Garth and Berk Geveci and William Godoy and Charles Hansen and Cyrus Harrison and Bernd Hentschel and Joseph Insley and Chris Johnson and Scott Klasky and Aaron Knoll and James Kress and Matthew Larsen and Jay Lofstead and Kwan-Liu Ma and Preeti Malakar and Jeremy Meredith and Kenneth Moreland and Paul Navrátil and Patrick O’Leary and Manish Parashar and Valerio Pascucci and John Patchett and Tom Peterka and Steve Petruzza and Norbert Podhorszki and David Pugmire and Michel Rasquin and Silvio Rizzi and David Rogers and Sudhanshu Sane and Franz Sauer and Robert Sisneros and Han-Wei Shen and Will Usher and Rhonda Vickery and Venkatram Vishwanath and Ingo Wald and Ruonan Wang and Gunther Weber and Brad Whitlock and Matthew Wolf and Hongfeng Yu and Sean Ziegeler},
  url       = {https://dsscale.org/wp-content/uploads/2020/10/ISTP.pdf},
  doi       = {10.1177/1094342020935991},
  year      = {2020},
  date      = {2020-08-14},
  journal   = {The International Journal of High Performance Computing Applications},
  volume    = {34},
  number    = {6},
  pages     = {676--691},
  abstract  = {The term “in situ processing” has evolved over the last decade to mean both a specific strategy for visualizing and analyzing data and an umbrella term for a processing paradigm. The resulting confusion makes it difficult for visualization and analysis scientists to communicate with each other and with their stakeholders. To address this problem, a group of over 50 experts convened with the goal of standardizing terminology. This paper summarizes their findings and proposes a new terminology for describing in situ systems. An important finding from this group was that in situ systems are best described via multiple, distinct axes: integration type, proximity, access, division of execution, operation controls, and output type. This paper discusses these axes, evaluates existing systems within the axes, and explores how currently used terms relate to the axes.},
  keywords  = {in situ processing, scientific visualization},
  pubstate  = {published},
  tppubtype = {article}
}
Zeller, Stephanie; Rogers, David
Visualizing Science: How Color Determines What We See Journal Article
In: 2020, (Published in EOS: Science News by AGU).
Abstract | Links | BibTeX | Tags: color, color theory, mathematical geophysics
@article{szellerEOS2020,
  title     = {Visualizing Science: How Color Determines What We See},
  author    = {Stephanie Zeller and David Rogers},
  url       = {https://eos.org/features/visualizing-science-how-color-determines-what-we-see},
  year      = {2020},
  date      = {2020-05-21},
  urldate   = {2020-05-21},
  journal   = {EOS},
  publisher = {EOS},
  abstract  = {Color plays a major role in the analysis and communication of scientific information. New tools are helping to improve how color can be applied more accurately and effectively to data.},
  note      = {Published in EOS: Science News by AGU},
  keywords  = {color, color theory, mathematical geophysics},
  pubstate  = {published},
  tppubtype = {article}
}
Abram, Gregory; Adhinarayanan, Vignesh; Feng, Wu-chun; Rogers, David; Ahrens, James; Wilson, Luke
ETH: A Framework for the Design-Space Exploration of Extreme-Scale Scientific Visualization Journal Article
In: 2020.
Abstract | Links | BibTeX | Tags:
@article{abrameth,
  title     = {ETH: A Framework for the Design-Space Exploration of Extreme-Scale Scientific Visualization},
  author    = {Gregory Abram and Vignesh Adhinarayanan and Wu-chun Feng and David Rogers and James Ahrens and Luke Wilson},
  url       = {https://dsscale.org/wp-content/uploads/2020/04/ETH-A-Framework-for-the-Design-Space-Exploration.pdf},
  year      = {2020},
  date      = {2020-04-07},
  abstract  = {As high-performance computing (HPC) moves towards the exascale era, large-scale scientific simulations are generating enormous datasets. A variety of techniques (e.g., in-situ methods, data sampling, and compression) have been proposed to help visualize these large datasets under various constraints such as storage, power, and energy. However, evaluating these techniques and understanding the various trade-offs (e.g., performance, efficiency, quality) remains a challenging task.
To enable the investigation and optimization across such tradeoffs, we propose a toolkit for the early-stage exploration of visualization and rendering approaches, job layout, and visualization pipelines. Our framework covers a broader parameter space than existing visualization applications such as ParaView and VisIt. It also promotes the study of simulation-visualization coupling strategies through a data-centric approach, rather than requiring the code itself. Furthermore, with experimentation on an extensively instrumented supercomputer, we study more metrics of interest than was previously possible. Overall, our framework will help to answer important what-if scenarios and trade-off questions in early stages of pipeline development, helping scientists to make informed choices about how to best couple a simulation code with visualization at extreme scale.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
To enable the investigation and optimization across such tradeoffs, we propose a toolkit for the early-stage exploration of visualization and rendering approaches, job layout, and visualization pipelines. Our framework covers a broader parameter space than existing visualization applications such as ParaView and VisIt. It also promotes the study of simulation-visualization coupling strategies through a data-centric approach, rather than requiring the code itself. Furthermore, with experimentation on an extensively instrumented supercomputer, we study more metrics of interest than was previously possible. Overall, our framework will help to answer important what-if scenarios and trade-off questions in early stages of pipeline development, helping scientists to make informed choices about how to best couple a simulation code with visualization at extreme scale.
Orban, Daniel; Banesh, Divya; Banesh, Cameron; Biwer, Christopher; Biswas, Ayan; Saavedra, Ramon; Sweeney, Christine; Sandberg, Richard; Bolme, C A; Ahrens, James; Rogers, David
Cinema:Bandit: a visualization application for beamline science demonstrated on XFEL shock physics experiments Journal Article
In: Journal of Synchrotron Radiation, vol. 27, no. 1, 2020.
Abstract | Links | BibTeX | Tags: continuous workflow, data visualization, database, real time, XFEL
@article{Orban:yn5053,
  title     = {Cinema:Bandit: a visualization application for beamline science demonstrated on {XFEL} shock physics experiments},
  author    = {Daniel Orban and Divya Banesh and Cameron Banesh and Christopher Biwer and Ayan Biswas and Ramon Saavedra and Christine Sweeney and Richard Sandberg and C A Bolme and James Ahrens and David Rogers},
  url       = {https://doi.org/10.1107/S1600577519014322
https://dsscale.org/wp-content/uploads/2019/12/Cinema-Bandit-a-visualization-application-for-beamline-science-demonstrated-on-XFEL-shock-physics-experiments.pdf},
  doi       = {10.1107/S1600577519014322},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {Journal of Synchrotron Radiation},
  volume    = {27},
  number    = {1},
  abstract  = {A new visualization tool, Cinema:Bandit, and its demonstration with a continuous workflow for analyzing shock physics experiments and visually exploring the data in real time at X-ray light sources is presented. Cinema:Bandit is an open-source, web-based visualization application in which the experimenter may explore an aggregated dataset to inform real-time beamline decisions and enable post hoc data analysis. The tool integrates with experimental workflows that process raw detector data into a simple database format, and it allows visualization of disparate data types, including experimental parameters, line graphs, and images. Use of parallel coordinates accommodates the irregular sampling of experimental parameters and allows for display and filtering of both experimental inputs and measurements. The tool is demonstrated on a dataset of shock-compressed titanium collected at the Matter in Extreme Conditions hutch at the Linac Coherent Light Source.},
  keywords  = {continuous workflow, data visualization, database, real time, XFEL},
  pubstate  = {published},
  tppubtype = {article}
}
Turton, Terece; Banesh, Divya; Overmyer, Trinity; Sims, Ben; Rogers, David
Enabling Domain Expertise in Scientific Visualization With CinemaScience Journal Article
In: IEEE Computer Graphics and Applications, vol. 40, no. 1, pp. 90-98, 2020, ISSN: 1558-1756, (LA-UR-19-29339).
Abstract | Links | BibTeX | Tags:
@article{Turton:2020:VisViewpoints,
  title     = {Enabling Domain Expertise in Scientific Visualization With {CinemaScience}},
  author    = {Terece Turton and Divya Banesh and Trinity Overmyer and Ben Sims and David Rogers},
  url       = {https://ieeexplore.ieee.org/document/8951775
https://dsscale.org/wp-content/uploads/2020/01/EnablingDomainExpertiseinScientificVisualizationWithCinemaScience.pdf},
  doi       = {10.1109/MCG.2019.2954171},
  issn      = {1558-1756},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {IEEE Computer Graphics and Applications},
  volume    = {40},
  number    = {1},
  pages     = {90--98},
  abstract  = {Scientific users present unique challenges to visualization researchers. Their high-level tasks require them to apply domain-specific expertise. We introduce a broader audience to the CinemaScience project and demonstrate how CinemaScience enables efficient visualization workflows that can bring in scientist expertise and drive scientific insight.},
  note      = {LA-UR-19-29339},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Lukasczyk, Jonas; Garth, Christoph; Larsen, Matthew; Engelke, Wito; Hotz, Ingrid; Rogers, David; Ahrens, James; Maciejewski, Ross
Cinema Darkroom: A Deferred Rendering Framework for Large-Scale Datasets Proceedings Article
In: 2020 IEEE 10th Symposium on Large Data Analysis and Visualization (LDAV), pp. 37–41, IEEE 2020.
Links | BibTeX | Tags: cinema, post-processing
@inproceedings{lukasczyk2020cinema,
  title        = {Cinema Darkroom: A Deferred Rendering Framework for Large-Scale Datasets},
  author       = {Jonas Lukasczyk and Christoph Garth and Matthew Larsen and Wito Engelke and Ingrid Hotz and David Rogers and James Ahrens and Ross Maciejewski},
  url          = {https://www.computer.org/csdl/proceedings-article/ldav/2020/846800a037/1pZ0U4aglxe},
  doi          = {10.1109/LDAV51489.2020.00011},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {2020 IEEE 10th Symposium on Large Data Analysis and Visualization (LDAV)},
  pages        = {37--41},
  organization = {IEEE},
  keywords     = {cinema, post-processing},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Bujack, Roxana; Yan, Lin; Hotz, Ingrid; Garth, Christoph; Wang, Bei
State of the Art in Time-Dependent Flow Topology: Interpreting Physical Meaningfulness Through Mathematical Properties Journal Article
In: Computer Graphics Forum, vol. 39, no. 3, pp. 811-835, 2020.
Abstract | Links | BibTeX | Tags:
@article{https://doi.org/10.1111/cgf.14037,
  title     = {State of the Art in Time-Dependent Flow Topology: Interpreting Physical Meaningfulness Through Mathematical Properties},
  author    = {Roxana Bujack and Lin Yan and Ingrid Hotz and Christoph Garth and Bei Wang},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2020Star.pdf},
  doi       = {10.1111/cgf.14037},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {Computer Graphics Forum},
  volume    = {39},
  number    = {3},
  pages     = {811--835},
  abstract  = {We present a state-of-the-art report on time-dependent flow topology. We survey representative papers in visualization and provide a taxonomy of existing approaches that generalize flow topology from time-independent to time-dependent settings. The approaches are classified based upon four categories: tracking of steady topology, reference frame adaption, pathline classification or clustering, and generalization of critical points. Our unique contributions include introducing a set of desirable mathematical properties to interpret physical meaningfulness for time-dependent flow visualization, inferring mathematical properties associated with selective research papers, and utilizing such properties for classification. The five most important properties identified in the existing literature include coincidence with the steady case, induction of a partition within the domain, Lagrangian invariance, objectivity, and Galilean invariance.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Sane, Sudhanshu; Bujack, Roxana; Garth, Christoph; Childs, Hank
A Survey of Seed Placement and Streamline Selection Techniques Journal Article
In: Computer Graphics Forum, vol. 39, no. 3, pp. 785-809, 2020.
Abstract | Links | BibTeX | Tags: • Human-centered computing → Scientific visualization, CCS Concepts
@article{https://doi.org/10.1111/cgf.14036,
  title     = {A Survey of Seed Placement and Streamline Selection Techniques},
  author    = {Sudhanshu Sane and Roxana Bujack and Christoph Garth and Hank Childs},
  url       = {http://www.informatik.uni-leipzig.de/~bujack/2020Sane.pdf},
  doi       = {10.1111/cgf.14036},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {Computer Graphics Forum},
  volume    = {39},
  number    = {3},
  pages     = {785--809},
  abstract  = {Streamlines are an extensively utilized flow visualization technique for understanding, verifying, and exploring computational fluid dynamics simulations. One of the major challenges associated with the technique is selecting which streamlines to display. Using a large number of streamlines results in dense, cluttered visualizations, often containing redundant information and occluding important regions, whereas using a small number of streamlines could result in missing key features of the flow. Many solutions to select a representative set of streamlines have been proposed by researchers over the past two decades. In this state-of-the-art report, we analyze and classify seed placement and streamline selection (SPSS) techniques used by the scientific flow visualization community. At a high-level, we classify techniques into automatic and manual techniques, and further divide automatic techniques into three strategies: density-based, feature-based, and similarity-based. Our analysis evaluates the identified strategy groups with respect to focus on regions of interest, minimization of redundancy, and overall computational performance. Finally, we consider the application contexts and tasks for which SPSS techniques are currently applied and have potential applications in the future.},
  keywords  = {• Human-centered computing → Scientific visualization, CCS Concepts},
  pubstate  = {published},
  tppubtype = {article}
}
Tsai, Karen; Bujack, Roxana; Geveci, Berk; Ayachit, Utkarsh; Ahrens, James
Approaches for In Situ Computation of Moments in a Data-Parallel Environment Proceedings Article
In: Frey, Steffen; Huang, Jian; Sadlo, Filip (Ed.): Eurographics Symposium on Parallel Graphics and Visualization, The Eurographics Association, 2020, ISSN: 1727-348X.
Abstract | Links | BibTeX | Tags: • Human-centered computing → Scientific visualization, parallel algorithms, pattern matching
@inproceedings{10.2312:pgv.20201075,
title = {Approaches for In Situ Computation of Moments in a Data-Parallel Environment},
author = {Karen Tsai and Roxana Bujack and Berk Geveci and Utkarsh Ayachit and James Ahrens},
editor = {Steffen Frey and Jian Huang and Filip Sadlo},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020EGPGV.pdf},
doi = {10.2312/pgv.20201075},
issn = {1727-348X},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Eurographics Symposium on Parallel Graphics and Visualization},
publisher = {The Eurographics Association},
abstract = {Feature-driven in situ data reduction can overcome the I/O bottleneck that large simulations face in modern supercomputer architectures in a semantically meaningful way. In this work, we make use of pattern detection as a black box detector of arbitrary feature templates of interest. In particular, we use moment invariants because they allow pattern detection independent of the specific orientation of a feature. We provide two open source implementations of a rotation invariant pattern detection algorithm for high performance computing (HPC) clusters with a distributed memory environment. The first one is a straightforward integration approach. The second one makes use of the Fourier transform and the Cross-Correlation Theorem. In this paper, we will compare the two approaches with respect to performance and flexibility and showcase results of the in situ integration with real world simulation code.},
keywords = {• Human-centered computing → Scientific visualization, parallel algorithms, pattern matching},
pubstate = {published},
tppubtype = {inproceedings}
}
2019
Pulido, Jesus; Lukic, Zarija; Thorman, Paul; Zheng, Caixia; Ahrens, James; Hamann, Bernd
Data Reduction Using Lossy Compression for Cosmology and Astrophysics Workflows Journal Article
In: Journal of Physics: Conference Series, vol. 1290, pp. 012008, 2019.
@article{Pulido_2019,
title = {Data Reduction Using Lossy Compression for Cosmology and Astrophysics Workflows},
author = {Jesus Pulido and Zarija Lukic and Paul Thorman and Caixia Zheng and James Ahrens and Bernd Hamann},
url = {https://doi.org/10.1088/1742-6596/1290/1/012008},
doi = {10.1088/1742-6596/1290/1/012008},
year = {2019},
date = {2019-10-01},
journal = {Journal of Physics: Conference Series},
volume = {1290},
pages = {012008},
publisher = {IOP Publishing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
De, Soumi; Biwer, Christopher M.; Capano, Collin D.; Nitz, Alexander H.; Brown, Duncan A.
Posterior samples of the parameters of binary black holes from Advanced LIGO, Virgo’s second observing run Journal Article
In: Scientific Data, vol. 6, pp. 81, 2019.
Links | BibTeX | Tags: Data Analysis, Gravitation
@article{De2019,
title = {Posterior samples of the parameters of binary black holes from Advanced LIGO, Virgo’s second observing run},
author = {Soumi De and Christopher M. Biwer and Collin D. Capano and Alexander H. Nitz and Duncan A. Brown},
url = {https://www.nature.com/articles/s41597-019-0086-6},
year = {2019},
date = {2019-06-03},
journal = {Scientific Data},
volume = {6},
pages = {81},
keywords = {Data Analysis, Gravitation},
pubstate = {published},
tppubtype = {article}
}
Biwer, Christopher M.; Capano, Collin; De, Soumi; Cabero, Miriam; Brown, Duncan; Nitz, Alexander; Raymond, Vivien
PyCBC Inference: A Python-based Parameter Estimation Toolkit for Compact Binary Coalescence Signals Journal Article
In: Publications of the Astronomical Society of the Pacific, vol. 131, no. 996, pp. 024503, 2019.
Abstract | Links | BibTeX | Tags: Data Analysis, gravitational waves - methods, statistical
@article{1538-3873-131-996-024503,
title = {PyCBC Inference: A Python-based Parameter Estimation Toolkit for Compact Binary Coalescence Signals},
author = {Christopher M. Biwer and Collin Capano and Soumi De and Miriam Cabero and Duncan Brown and Alexander Nitz and Vivien Raymond},
url = {http://stacks.iop.org/1538-3873/131/i=996/a=024503},
year = {2019},
date = {2019-01-01},
journal = {Publications of the Astronomical Society of the Pacific},
volume = {131},
number = {996},
pages = {024503},
abstract = {We introduce new modules in the open-source PyCBC gravitational-wave astronomy toolkit that implement Bayesian inference for compact-object binary mergers. We review the Bayesian inference methods implemented and describe the structure of the modules. We demonstrate that the PyCBC Inference modules produce unbiased estimates of the parameters of a simulated population of binary black hole mergers. We show that the parameters’ posterior distributions obtained using our new code agree well with the published estimates for binary black holes in the first Advanced LIGO–Virgo observing run.},
keywords = {Data Analysis, gravitational waves - methods, statistical},
pubstate = {published},
tppubtype = {article}
}
Samsel, Francesca; Wolfram, Phillip; Bares, Annie; Turton, Terece; Bujack, Roxana
Colormapping resources and strategies for organized intuitive environmental visualization Journal Article
In: Environmental Earth Sciences, vol. 78, no. 9, pp. 269, 2019, ISSN: 1866-6280, (LA-UR-19-20060).
Abstract | Links | BibTeX | Tags:
@article{samsel2019colormapping,
  title     = {Colormapping resources and strategies for organized intuitive environmental visualization},
  author    = {Francesca Samsel and Phillip Wolfram and Annie Bares and Terece Turton and Roxana Bujack},
  journal   = {Environmental Earth Sciences},
  volume    = {78},
  number    = {9},
  pages     = {269},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  doi       = {10.1007/s12665-019-8237-9},
  url       = {https://doi.org/10.1007/s12665-019-8237-9},
  issn      = {1866-6280},
  abstract  = {Visualizations benefit from the use of intuitive organized color application, enabling a clearer understanding and communication. In this paper, we apply the concept of semantic color association to the generation of thematic colormaps for the environmental sciences in combination with principals of artistic color theory to expand feature resolution and create visual hierarchies within a visualization. In particular, we provide sets of color scales, colormaps and color organization guidance for semantically aligned water, atmosphere, land, and vegetation visualization. Strategies for directing attention via saturation levels and saturation sets of colormaps enable deployment of these techniques. All are publicly available online and accompanied by tools and strategy guidance.},
  note      = {LA-UR-19-20060},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Dutta, Soumya; Biswas, Ayan; Ahrens, James
Multivariate Pointwise Information-Driven Data Sampling and Visualization Journal Article
In: Entropy, vol. 21, no. 7, 2019, ISSN: 1099-4300, (LA-UR-19-24243).
Abstract | Links | BibTeX | Tags: data reduction, multivariate sampling, query-driven visualization
@article{e21070699,
title = {Multivariate Pointwise Information-Driven Data Sampling and Visualization},
author = {Soumya Dutta and Ayan Biswas and James Ahrens},
url = {https://www.mdpi.com/1099-4300/21/7/699},
doi = {10.3390/e21070699},
issn = {1099-4300},
year = {2019},
date = {2019-01-01},
journal = {Entropy},
volume = {21},
number = {7},
pages = {699},
abstract = {With increasing computing capabilities of modern supercomputers, the size of the data generated from the scientific simulations is growing rapidly. As a result, application scientists need effective data summarization techniques that can reduce large-scale multivariate spatiotemporal data sets while preserving the important data properties so that the reduced data can answer domain-specific queries involving multiple variables with sufficient accuracy. While analyzing complex scientific events, domain experts often analyze and visualize two or more variables together to obtain a better understanding of the characteristics of the data features. Therefore, data summarization techniques are required to analyze multi-variable relationships in detail and then perform data reduction such that the important features involving multiple variables are preserved in the reduced data. To achieve this, in this work, we propose a data sub-sampling algorithm for performing statistical data summarization that leverages pointwise information theoretic measures to quantify the statistical association of data points considering multiple variables and generates a sub-sampled data that preserves the statistical association among multi-variables. Using such reduced sampled data, we show that multivariate feature query and analysis can be done effectively. The efficacy of the proposed multivariate association driven sampling algorithm is presented by applying it on several scientific data sets.},
note = {LA-UR-19-24243},
keywords = {data reduction, multivariate sampling, query-driven visualization},
pubstate = {published},
tppubtype = {article}
}
Dutta, Soumya; Brady, Riley; Maltrud, Mathew; Wolfram, Philip; Bujack, Roxana
Leveraging Lagrangian Analysis for Discriminating Nutrient Origins Proceedings Article
In: Bujack, Roxana; Feige, Kathrin; Rink, Karsten; Zeckzer, Dirk (Ed.): Workshop on Visualisation in Environmental Sciences (EnvirVis), pp. 17-24, The Eurographics Association, 2019, ISBN: 978-3-03868-086-4, (LA-UR-19-22455).
Links | BibTeX | Tags: human-centered computing, scientific visualization
@inproceedings{N20103:2019,
title = {Leveraging Lagrangian Analysis for Discriminating Nutrient Origins},
author = {Soumya Dutta and Riley Brady and Mathew Maltrud and Philip Wolfram and Roxana Bujack},
editor = {Roxana Bujack and Kathrin Feige and Karsten Rink and Dirk Zeckzer},
url = {https://dsscale.org/wp-content/uploads/2019/07/leveraging-lagrangian-analysis-for-discriminating-nutrient-origins.pdf},
doi = {10.2312/envirvis.20191100},
isbn = {978-3-03868-086-4},
year = {2019},
date = {2019-01-01},
booktitle = {Workshop on Visualisation in Environmental Sciences (EnvirVis)},
pages = {17--24},
publisher = {The Eurographics Association},
note = {LA-UR-19-22455},
keywords = {human-centered computing, scientific visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Baker, Allison; Hammerling, Dorit; Turton, Terece
Evaluating Image Quality Measures to Assess the Impact of Lossy Data Compression Applied to Climate Simulation Data Journal Article
In: Computer Graphics Forum, 2019, ISSN: 1467-8659, (LA-UR-19-22420).
Links | BibTeX | Tags: data compaction and compression, feature evaluation
@article{10.1111:cgf.13707,
  title     = {Evaluating Image Quality Measures to Assess the Impact of Lossy Data Compression Applied to Climate Simulation Data},
  author    = {Allison Baker and Dorit Hammerling and Terece Turton},
  journal   = {Computer Graphics Forum},
  publisher = {The Eurographics Association and John Wiley & Sons Ltd.},
  year      = {2019},
  date      = {2019-01-01},
  doi       = {10.1111/cgf.13707},
  url       = {https://dsscale.org/wp-content/uploads/2019/07/BakerHammerlingTurtonEuroVis2019-EvalLossyCompression.pdf},
  issn      = {1467-8659},
  note      = {LA-UR-19-22420},
  keywords  = {data compaction and compression, feature evaluation},
  pubstate  = {published},
  tppubtype = {article}
}
Banesh, Divya; Peterson, Mark; Wendelberger, Joanne; Ahrens, James; Hamann, Bernd
Comparison of piecewise linear change point detection with traditional analytical methods for ocean and climate data Journal Article
In: 2019.
Abstract | Links | BibTeX | Tags: change point detection, Fourier transform, ocean data, Wavelets
@article{8823794b,
title = {Comparison of piecewise linear change point detection with traditional analytical methods for ocean and climate data},
author = {Divya Banesh and Mark Peterson and Joanne Wendelberger and James Ahrens and Bernd Hamann},
url = {https://link.springer.com/article/10.1007/s12665-019-8636-y},
doi = {10.1007/s12665-019-8636-y},
year = {2019},
date = {2019-01-01},
journal = {Environmental Earth Sciences},
abstract = {Earth's atmosphere and oceans are largely determined by periodic patterns of solar radiation, from daily and seasonal, to orbital variations over thousands of years. Dynamical processes alter these cycles with feedbacks and delays, so that the observed climate response is a combination of cyclical features and sudden regime changes. A primary example is the shift from a glacial (ice age) state to interglacial, which is driven by a 100-thousand year orbital cycle, while the transition occurs over a period of hundreds of years. Traditional methods of statistical analysis such as Fourier and wavelet transforms are very good at describing cyclical behavior but lack any characterization of singular events and regime changes. More recently, researchers have tested techniques in the statistical discipline of change point detection. This paper explores the unique advantages of a piecewise linear regression change point detection algorithm to identify events, regime shifts, and the direction of cyclical trends in geophysical data. It evaluates the reasons for choosing this particular change detection algorithm over other techniques by applying the technique to both observational and model data sets. A comparison of the proposed change detection algorithm to the more established statistical techniques shows the benefits and drawbacks of each method.},
keywords = {change point detection, Fourier transform, ocean data, Wavelets},
pubstate = {published},
tppubtype = {article}
}
Maack, Robin; Rogers, David; Hagen, Hans; Gillmann, Christina
Exploring Cinema Databases using multi-dimensional Image Measures Journal Article
In: 2019.
Abstract | Links | BibTeX | Tags:
@inproceedings{maackexploring,
title = {Exploring Cinema Databases using multi-dimensional Image Measures},
author = {Robin Maack and David Rogers and Hans Hagen and Christina Gillmann},
url = {https://dsscale.org/wp-content/uploads/2020/03/LEVIA19_paper_2.pdf},
year = {2019},
date = {2019-01-01},
booktitle = {LEVIA 2019: Leipzig Symposium on Visualization in Applications},
abstract = {Exa-scale simulations can be hard to analyze because it is nearly impossible to store all computed time-steps and other parameters. The Cinema Database provides a storage saving solution, that captures images of each simulation time-step from a variety of camera angles. Still, the resulting number of images can be overwhelming and it is hard to find interesting images and features for further analysis. We present a zoom based approach where users can utilize arbitrary image measures to explore interesting images and further analyze their behavior in detail. We showed the effectiveness of our approach by providing two real world Cinema datasets.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Nardini, Pascal; Chen, Min; Samsel, Francesca; Bujack, Roxana; Bottinger, Michael; Scheuermann, Gerik
The making of continuous colormaps Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 27, no. 6, pp. 3048–3063, 2019.
Abstract | Links | BibTeX | Tags: ccc-tool, color perception, colormaps
@article{nardini2019making,
title = {The making of continuous colormaps},
author = {Pascal Nardini and Min Chen and Francesca Samsel and Roxana Bujack and Michael Bottinger and Gerik Scheuermann},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020ccc.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {27},
number = {6},
pages = {3048--3063},
publisher = {IEEE},
abstract = {Continuous colormaps are integral parts of many visualization techniques, such as heat-maps, surface plots, and flow visualization. Despite that the critiques of rainbow colormaps have been around and well-acknowledged for three decades, rainbow colormaps are still widely used today. One reason behind the resilience of rainbow colormaps is the lack of tools for users to create a continuous colormap that encodes semantics specific to the application concerned. In this paper, we present a web-based software system, CCC-Tool (short for Charting Continuous Colormaps) under the URL https://ccctool.com, for creating, editing, and analyzing such application-specific colormaps. We introduce the notion of “colormap specification (CMS)” that maintains the essential semantics required for defining a color mapping scheme. We provide users with a set of advanced utilities for constructing CMS’s with various levels of complexity, examining their quality attributes using different plots, and exporting them to external application software. We present two case studies, demonstrating that the CCC-Tool can help domain scientists as well as visualization experts in designing semantically-rich colormaps.},
keywords = {ccc-tool, color perception, colormaps},
pubstate = {published},
tppubtype = {article}
}
2018
Gospodnetic, Petra; Banesh, Divya; Wolfram, Phillip; Petersen, Mark; Hagen, Hans; Ahrens, James; Rauhut, Markus
Ocean Current Segmentation at Different Depths and Correlation with Temperature in a MPAS-Ocean Simulation Proceedings Article
In: 2018 IEEE Scientific Visualization Conference (SciVis), pp. 62-66, 2018.
Abstract | Links | BibTeX | Tags: image processing, ocean current segmentation, ocean current visualization, ocean currents
@inproceedings{8823794,
title = {Ocean Current Segmentation at Different Depths and Correlation with Temperature in a MPAS-Ocean Simulation},
author = {Petra Gospodnetic and Divya Banesh and Phillip Wolfram and Mark Petersen and Hans Hagen and James Ahrens and Markus Rauhut},
url = {https://ieeexplore.ieee.org/abstract/document/8823794
https://dsscale.org/wp-content/uploads/2019/10/08823794_optimized.pdf},
doi = {10.1109/SciVis.2018.8823794},
year = {2018},
date = {2018-10-01},
booktitle = {2018 IEEE Scientific Visualization Conference (SciVis)},
pages = {62--66},
abstract = {When analyzing and interpreting results of an ocean simulation, the prevalent method in oceanography is to visualize the complete dataset. However, this can lead to data being missed or misinterpreted due to the distraction caused by the extraneous data of the simulation. Furthermore, when the data stretches over many layers in depth or over numerous time-steps, the ability to track attributes such as ocean currents becomes difficult due to the complexity of the data. We propose an image processing approach to simulation preprocessing for visualization purposes, which offers automation of ocean current tracking within a simulation and ocean current segmentation from the rest of the simulation data. Using the proposed approach, it is possible to automatically identify the most scientifically-relevant streams, extract them from the rest of the simulation and correlate their behavior with other simulation parameters.},
keywords = {image processing, ocean current segmentation, ocean current visualization, ocean currents},
pubstate = {published},
tppubtype = {inproceedings}
}
Abram, Greg; Navrátil, Paul; Grossett, Pascal; Rogers, David; Ahrens, James
Galaxy: Asynchronous Ray Tracing for Large High-Fidelity Visualization Proceedings Article
In: 2018 IEEE 8th Symposium on Large Data Analysis and Visualization (LDAV), pp. 72-76, 2018, (LA-UR-18-26088).
Abstract | Links | BibTeX | Tags: computer graphics, human-centered computing, ray tracing, rendering, visualization
@inproceedings{8739241,
title = {Galaxy: Asynchronous Ray Tracing for Large High-Fidelity Visualization},
author = {Greg Abram and Paul Navrátil and Pascal Grossett and David Rogers and James Ahrens},
doi = {10.1109/LDAV.2018.8739241},
year = {2018},
date = {2018-10-01},
booktitle = {2018 IEEE 8th Symposium on Large Data Analysis and Visualization (LDAV)},
pages = {72--76},
abstract = {We present Galaxy, a fully asynchronous distributed parallel rendering engine geared towards using full global illumination for large-scale visualization. Galaxy provides performant distributed rendering of complex lighting and material models, particularly those that require ray traversal across nodes. Our design is favorable for tightly-coupled in situ scenarios, where data remains on simulation nodes. By employing asynchronous framebuffer updates and a novel subtractive lighting model, we achieve acceptable image quality from the first ray generation, and improve quality throughout the render epoch. On simulated in situ rendering tasks, Galaxy outperforms the current best-of-breed scientific ray tracer by over 3× for distributed geometric and particle data, while providing expanded rendering capability for global illumination and complex materials.},
note = {LA-UR-18-26088},
keywords = {computer graphics, human-centered computing, ray tracing, rendering, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
De, Soumi; Finstad, Daniel; Lattimer, James; Brown, Duncan; Berger, Edo; Biwer, Christopher
Tidal Deformabilities and Radii of Neutron Stars from the Observation of GW170817 Journal Article
In: Phys. Rev. Lett., vol. 121, pp. 091102, 2018.
Links | BibTeX | Tags: Cosmology & Astrophysics, Gravitation
@article{PhysRevLett.121.091102,
  title     = {Tidal Deformabilities and Radii of Neutron Stars from the Observation of GW170817},
  author    = {Soumi De and Daniel Finstad and James Lattimer and Duncan Brown and Edo Berger and Christopher Biwer},
  journal   = {Phys. Rev. Lett.},
  volume    = {121},
  pages     = {091102},
  publisher = {American Physical Society},
  year      = {2018},
  date      = {2018-08-01},
  doi       = {10.1103/PhysRevLett.121.091102},
  url       = {https://link.aps.org/doi/10.1103/PhysRevLett.121.091102},
  keywords  = {Cosmology & Astrophysics, Gravitation},
  pubstate  = {published},
  tppubtype = {article}
}
}
Cabero, Miriam; Capano, Collin; Fischer-Birnholtz, Ofek; Krishnan, Badri; Nielsen, Alex; Nitz, Alexander; Biwer, Christopher
Observational tests of the black hole area increase law Journal Article
In: Phys. Rev. D, vol. 97, pp. 124069, 2018.
Links | BibTeX | Tags: Cosmology & Astrophysics, Gravitation
@article{PhysRevD.97.124069,
  title     = {Observational tests of the black hole area increase law},
  author    = {Miriam Cabero and Collin Capano and Ofek Fischer-Birnholtz and Badri Krishnan and Alex Nielsen and Alexander Nitz and Christopher Biwer},
  journal   = {Phys. Rev. D},
  volume    = {97},
  pages     = {124069},
  publisher = {American Physical Society},
  year      = {2018},
  date      = {2018-06-01},
  doi       = {10.1103/PhysRevD.97.124069},
  url       = {https://link.aps.org/doi/10.1103/PhysRevD.97.124069},
  keywords  = {Cosmology & Astrophysics, Gravitation},
  pubstate  = {published},
  tppubtype = {article}
}
Vogel, Sven; Biwer, Chris; Rogers, David; Ahrens, James; Hackenberg, Robert; Onken, Drew; Zhang, Jianzhong
Interactive visualization of multi-data-set Rietveld analyses using Cinema:Debye-Scherrer Journal Article
In: Journal of Applied Crystallography, vol. 51, no. 3, pp. 943–951, 2018.
Abstract | Links | BibTeX | Tags: automated analysis, diffraction data analysis, visualization
@article{Vogel:ks5597,
title = {Interactive visualization of multi-data-set Rietveld analyses using \textit{Cinema:Debye-Scherrer}},
author = {Sven Vogel and Chris Biwer and David Rogers and James Ahrens and Robert Hackenberg and Drew Onken and Jianzhong Zhang},
url = {https://doi.org/10.1107/S1600576718003989},
doi = {10.1107/S1600576718003989},
year = {2018},
date = {2018-06-01},
journal = {Journal of Applied Crystallography},
volume = {51},
number = {3},
pages = {943--951},
abstract = {A tool named \textit{Cinema:Debye-Scherrer} to visualize the results of a series of Rietveld analyses is presented. The multi-axis visualization of the high-dimensional data sets resulting from powder diffraction analyses allows identification of analysis problems, prediction of suitable starting values, identification of gaps in the experimental parameter space and acceleration of scientific insight from the experimental data. The tool is demonstrated with analysis results from 59 U–Nb alloy samples with different compositions, annealing times and annealing temperatures as well as with a high-temperature study of the crystal structure of CsPbBr$_{3}$. A script to extract parameters from a series of Rietveld analyses employing the widely used \textit{GSAS} Rietveld software is also described. Both software tools are available for download.},
keywords = {automated analysis, diffraction data analysis, visualization},
pubstate = {published},
tppubtype = {article}
}
Pulido, Jesus; Livescu, Daniel; Kanov, Kalin; Burns, Randal; Canada, Curtis; Ahrens, James; Hamann, Bernd
Remote Visual Analysis of Large Turbulence Databases at Multiple Scales Journal Article
In: Journal of Parallel and Distributed Computing, 2018, ISSN: 0743-7315, (LA-UR-17-20757).
Abstract | Links | BibTeX | Tags: Computer Science, data reduction, Databases, Distributed Systems, Mathematics and Computing, remote visualization, turbulence, Wavelets
@article{info:lanl-repo/lareport/LA-UR-17-20757,
title = {Remote Visual Analysis of Large Turbulence Databases at Multiple Scales},
author = {Jesus Pulido and Daniel Livescu and Kalin Kanov and Randal Burns and Curtis Canada and James Ahrens and Bernd Hamann},
url = {https://www.sciencedirect.com/science/article/pii/S0743731518303927},
doi = {10.1016/j.jpdc.2018.05.011},
issn = {0743-7315},
year = {2018},
date = {2018-01-01},
journal = {Journal of Parallel and Distributed Computing},
abstract = {The remote analysis and visualization of raw large turbulence datasets is challenging. Current accurate direct numerical simulations (DNS) of turbulent flows generate datasets with billions of points per time-step and several thousand time-steps per simulation. Until recently, the analysis and visualization of such datasets was restricted to scientists with access to large supercomputers. The public Johns Hopkins Turbulence database simplifies access to multi-terabyte turbulence datasets and facilitates the computation of statistics and extraction of features through the use of commodity hardware. We present a framework designed around wavelet-based compression for high-speed visualization of large datasets and methods supporting multi-resolution analysis of turbulence. By integrating common technologies, this framework enables remote access to tools available on supercomputers and over 230 terabytes of DNS data over the Web. The database toolset is expanded by providing access to exploratory data analysis tools, such as wavelet decomposition capabilities and coherent feature extraction.},
note = {LA-UR-17-20757},
keywords = {Computer Science, data reduction, Databases, Distributed Systems, Mathematics and Computing, remote visualization, turbulence, Wavelets},
pubstate = {published},
tppubtype = {article}
}
Finstad, Daniel; De, Soumi; Brown, Duncan; Berger, Edo; Biwer, Christopher
Measuring the Viewing Angle of GW170817 with Electromagnetic and Gravitational Waves Journal Article
In: The Astrophysical Journal Letters, vol. 860, no. 1, pp. L2, 2018.
Abstract | Links | BibTeX | Tags: Cosmology & Astrophysics, Gravitation
@article{2041-8205-860-1-L2,
title = {Measuring the Viewing Angle of GW170817 with Electromagnetic and Gravitational Waves},
author = {Daniel Finstad and Soumi De and Duncan Brown and Edo Berger and Christopher Biwer},
url = {http://stacks.iop.org/2041-8205/860/i=1/a=L2},
year = {2018},
date = {2018-01-01},
journal = {The Astrophysical Journal Letters},
volume = {860},
number = {1},
pages = {L2},
abstract = {The joint detection of gravitational waves (GWs) and electromagnetic (EM) radiation from the binary neutron star merger GW170817 ushered in a new era of multi-messenger astronomy. Joint GW–EM observations can be used to measure the parameters of the binary with better precision than either observation alone. Here, we use joint GW–EM observations to measure the viewing angle of GW170817, the angle between the binary’s angular momentum and the line of sight. We combine a direct measurement of the distance to the host galaxy of GW170817 (NGC 4993) of 40.7 ± 2.36 Mpc with the Laser Interferometer Gravitational-wave Observatory (LIGO)/Virgo GW data and find that the viewing angle is $32_{-13}^{+10}$ ± 1.7 degrees (90% confidence, statistical, and systematic errors). We place a conservative lower limit on the viewing angle of ≥13°, which is robust to the choice of prior. This measurement provides a constraint on models of the prompt γ-ray and radio/X-ray afterglow emission associated with the merger; for example, it is consistent with the off-axis viewing angle inferred for a structured jet model. We provide for the first time the full posterior samples from Bayesian parameter estimation of LIGO/Virgo data to enable further analysis by the community.},
keywords = {Cosmology & Astrophysics, Gravitation},
pubstate = {published},
tppubtype = {article}
}
Ware, Colin; Turton, Terece; Bujack, Roxana; Samsel, Francesca; Shrivastava, Piyush; Rogers, David
Measuring and Modeling the Feature Detection Threshold Functions of Colormaps Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, pp. 1-1, 2018, ISSN: 1077-2626, (LA-UR-18-21476).
Abstract | Links | BibTeX | Tags: color perception, Colormapping, feature extraction, frequency measurements, Image color analysis, sea measurements, sensitivity, spatial resolution, Task analysis
@article{8413174,
  title     = {Measuring and Modeling the Feature Detection Threshold Functions of Colormaps},
  author    = {Colin Ware and Terece Turton and Roxana Bujack and Francesca Samsel and Piyush Shrivastava and David Rogers},
  journal   = {IEEE Transactions on Visualization and Computer Graphics},
  pages     = {1-1},
  year      = {2018},
  date      = {2018-01-01},
  doi       = {10.1109/TVCG.2018.2855742},
  url       = {https://ieeexplore.ieee.org/document/8413174},
  issn      = {1077-2626},
  abstract  = {Pseudocoloring is one of the most common techniques used in scientific visualization. To apply pseudocoloring to a scalar field, the field value at each point is represented using one of a sequence of colors (called a colormap). One of the principles applied in generating colormaps is uniformity and previously the main method for determining uniformity has been the application of uniform color spaces. Here we present a new method for evaluating the feature discrimination threshold function across a colormap. The method is used in crowdsourced studies for the direct evaluation of nine colormaps for three feature sizes. The results are used to test the hypothesis that a uniform color space (CIELAB) gives too much weight to chromatic differences compared to luminance differences because of the way it was constructed. The hypothesis that feature discrimination can be predicted solely on the basis of luminance is also tested. The results reject both hypotheses and we demonstrate how reduced weights on the green-red and blue-yellow terms of the CIELAB color space creates a more accurate model when the task is the detection of smaller features in colormapped data. Both the method itself and modified CIELAB can be used in colormap design and evaluation.},
  note      = {LA-UR-18-21476},
  keywords  = {color perception, Colormapping, feature extraction, frequency measurements, Image color analysis, sea measurements, sensitivity, spatial resolution, Task analysis},
  pubstate  = {published},
  tppubtype = {article}
}
Biswas, Ayan; Dutta, Soumya; Pulido, Jesus; Ahrens, James
In Situ Data-driven Adaptive Sampling for Large-scale Simulation Data Summarization Proceedings Article
In: Proceedings of the Workshop on In Situ Infrastructures for Enabling Extreme-Scale Analysis and Visualization, pp. 13–18, ACM, Dallas, Texas, 2018, ISBN: 978-1-4503-6579-6.
Abstract | Links | BibTeX | Tags: human-centered computing, Mathematics and Computing, scientific visualization, statistical paradigms
@inproceedings{Biswas:2018:SDA:3281464.3281467,
  title     = {In Situ Data-driven Adaptive Sampling for Large-scale Simulation Data Summarization},
  author    = {Ayan Biswas and Soumya Dutta and Jesus Pulido and James Ahrens},
  url       = {http://doi.acm.org/10.1145/3281464.3281467
https://datascience.dsscale.org/wp-content/uploads/2019/01/LA-UR-18-28035.pdf},
  doi       = {10.1145/3281464.3281467},
  isbn      = {978-1-4503-6579-6},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {Proceedings of the Workshop on In Situ Infrastructures for Enabling Extreme-Scale Analysis and Visualization},
  pages     = {13--18},
  publisher = {ACM},
  address   = {Dallas, Texas},
  series    = {ISAV '18},
  abstract  = {Recent advancements in high-performance computing have enabled scientists to model various scientific phenomena in great detail. However, the analysis and visualization of the output data from such large-scale simulations are posing significant challenges due to their excessive size and disk I/O bottlenecks. One viable solution to this problem is to create a sub-sampled dataset which is able to preserve the important information of the data and also is significantly smaller in size compared to the raw data. Creating an in situ workflow for generating such intelligently sub-sampled datasets is of prime importance for such simulations. In this work, we propose an information-driven data sampling technique and compare it with two well-known sampling methods to demonstrate the superiority of the proposed method. The in situ performance of the proposed method is evaluated by applying it to the Nyx Cosmology simulation. We compare and contrast the performance of these various sampling algorithms and provide a holistic view of all the methods so that the scientists can choose appropriate sampling schemes based on their analysis requirements.},
  keywords  = {human-centered computing, Mathematics and Computing, scientific visualization, statistical paradigms},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Zeyen, Max; Post, Tobias; Hagen, Hans; Ahrens, James; Rogers, David; Bujack, Roxana
Color Interpolation for Non-Euclidean Color Spaces Proceedings Article
In: IEEE Scientific Visualization Conference (SciVis) Short Papers, IEEE, 2018.
Abstract | Links | BibTeX | Tags: visualization, visualization techniques
@inproceedings{zeyen2018interpolation,
  title     = {Color Interpolation for Non-Euclidean Color Spaces},
  author    = {Max Zeyen and Tobias Post and Hans Hagen and James Ahrens and David Rogers and Roxana Bujack},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/01/ColorInterpolationforNon-EuclideanColorSpaces.pdf},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {IEEE Scientific Visualization Conference (SciVis) Short Papers},
  publisher = {IEEE},
  abstract  = {Color interpolation is critical to many applications across a variety of domains, like color mapping or image processing. Due to the characteristics of the human visual system, color spaces whose distance measure is designed to mimic perceptual color differences tend to be non-Euclidean. In this setting, a generalization of established interpolation schemes is not trivial. This paper presents an approach to generalize linear interpolation to colors for color spaces equipped with an arbitrary non-Euclidean distance measure. It makes use of the fact that in Euclidean spaces, a straight line coincides with the shortest path between two points. Additionally, we provide an interactive implementation of our method for the CIELAB color space using the CIEDE2000 distance measure integrated into VTK and ParaView.},
  keywords  = {visualization, visualization techniques},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Bujack, Roxana; Turton, Terece; Rogers, David; Ahrens, James
Ordering Perceptions about Perceptual Order Proceedings Article
In: IEEE Scientific Visualization Conference (SciVis) Short Papers, IEEE, 2018.
Abstract | Links | BibTeX | Tags: visualization, visualization techniques
@inproceedings{bujack2018ordering,
  title     = {Ordering Perceptions about Perceptual Order},
  author    = {Roxana Bujack and Terece Turton and David Rogers and James Ahrens},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/01/OrderingPerceptionsaboutPerceptualOrder.pdf},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {IEEE Scientific Visualization Conference (SciVis) Short Papers},
  publisher = {IEEE},
  abstract  = {One of the most important properties that inherently defines a good colormap is perceptual order. In the literature, we find a wide range of recommendations and hypotheses regarding order. Properties such as monotonicity in luminance, saturation, or hue are/are not stated as necessary/sufficient to ensure perceptual order. In this paper, we gather the most common statements about perceptual order and, when possible, prove or disprove them.},
  keywords  = {visualization, visualization techniques},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Sane, Sudhanshu; Bujack, Roxana; Childs, Hank
Revisiting the Evaluation of In Situ Lagrangian Analysis Proceedings Article
In: Childs, Hank; Cucchietti, Fernando (Ed.): Eurographics Symposium on Parallel Graphics and Visualization, The Eurographics Association, 2018, ISSN: 1727-348X.
Abstract | Links | BibTeX | Tags:
@inproceedings{sane2018revisiting,
  title     = {Revisiting the Evaluation of In Situ Lagrangian Analysis},
  author    = {Sudhanshu Sane and Roxana Bujack and Hank Childs},
  editor    = {Hank Childs and Fernando Cucchietti},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/01/RevisitingtheEvaluationofInSituLagrangianAnalysis.pdf},
  doi       = {10.2312/pgv.20181096},
  issn      = {1727-348X},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {Eurographics Symposium on Parallel Graphics and Visualization},
  publisher = {The Eurographics Association},
  abstract  = {In situ usage of Lagrangian techniques has proven to be superior with respect to emerging supercomputing trends than the traditional Eulerian approach for scientific flow analysis. However, previous studies have not informed two key points: (1) the accuracy of the post hoc interpolated trajectory as a whole and (2) the spatiotemporal tradeoffs involved when using Lagrangian analysis. With this short paper, we address these points. We first conduct a more comprehensive evaluation via additional accuracy metrics tailored for evaluating Lagrangian trajectories. Second, we provide an understanding of the configurations where the Lagrangian approach works well by studying spatiotemporal tradeoffs. In addition, our study highlights the effects of error propagation and accumulation when performing Lagrangian interpolation for large numbers of steps. We believe our study is significant for better understanding the use of in situ Lagrangian techniques, as well as serving as a baseline for future Lagrangian research.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Bujack, Roxana; Rogers, David; Ahrens, James
Reducing Occlusion in Cinema Databases through Feature-Centric Visualizations Proceedings Article
In: Leipzig Symposium on Visualization In Applications (LEVIA), 2018.
Abstract | Links | BibTeX | Tags: cinema, feature, image space, in situ, moment invariants, occlusion, pattern detection
@inproceedings{bujack2018reducing,
  title     = {Reducing Occlusion in Cinema Databases through Feature-Centric Visualizations},
  author    = {Roxana Bujack and David Rogers and James Ahrens},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/01/ReducingOcclusioninCinemaDatabasesthroughFeature-CentricVisualizations.pdf},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {Leipzig Symposium on Visualization In Applications (LEVIA)},
  abstract  = {In modern supercomputer architectures, the I/O capabilities do not keep up with the computational speed. Image-based techniques are one very promising approach to a scalable output format for visual analysis, in which a reduced output that corresponds to the visible state of the simulation is rendered in-situ and stored to disk. These techniques can support interactive exploration of the data through image compositing and other methods, but automatic methods of highlighting data and reducing clutter can make these methods more effective. In this paper, we suggest a method of assisted exploration through the combination of feature-centric analysis with image space techniques and show how the reduction of the data to features of interest reduces occlusion in the output for a set of example applications.},
  keywords  = {cinema, feature, image space, in situ, moment invariants, occlusion, pattern detection},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Banesh, Divya; Wendelberger, Joanne; Petersen, Mark; Ahrens, James; Hamann, Bernd
Change Point Detection for Ocean Eddy Analysis Proceedings Article
In: Proceedings of the Workshop on Visualisation in Environmental Sciences, pp. 27–33, Eurographics Association, Brno, Czech Republic, 2018, ISBN: 978-3-03868-063-5.
Abstract | Links | BibTeX | Tags: exploratory data analysis, image processing, object detection, regression analysis, time series analysis
@inproceedings{Banesh:2018:CPD:3310180.3310186,
  title     = {Change Point Detection for Ocean Eddy Analysis},
  author    = {Divya Banesh and Joanne Wendelberger and Mark Petersen and James Ahrens and Bernd Hamann},
  url       = {http://dl.acm.org/citation.cfm?id=3310180.3310186
https://dsscale.org/wp-content/uploads/2019/10/dbanesh_ChangeDetection_optimized.pdf},
  isbn      = {978-3-03868-063-5},
  year      = {2018},
  date      = {2018-01-01},
  booktitle = {Proceedings of the Workshop on Visualisation in Environmental Sciences},
  pages     = {27--33},
  publisher = {Eurographics Association},
  address   = {Brno, Czech Republic},
  series    = {EnvirVis '18},
  abstract  = {The detection and analysis of mesoscale ocean eddies is a complex task, made more difficult when simulated or observational ocean data are massive. We present the statistical approach of change point detection as a means to help scientists efficiently extract relevant scientific information. We demonstrate the value of change point detection for the characterization of eddy behavior in simulated ocean data. Our results show that change point detection helps with the identification of significant parameter values used in an algorithm or determination of time points that correspond to eddy activity of interest.},
  keywords  = {exploratory data analysis, image processing, object detection, regression analysis, time series analysis},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2017
Yang, Bo; Kostkova, Jitka; Flusser, Jan; Suk, Tomas; Bujack, Roxana
Rotation Invariants of Vector Fields from Orthogonal Moments Journal Article
In: Pattern Recognition, no. Supplement C, pp. 110–121, 2017, ISSN: 0031-3203, (LA-UR-17-26797, Under a Creative Commons license: http://creativecommons.org/licenses/by-nc-nd/4.0/).
Abstract | Links | BibTeX | Tags: Numerical stability, visualization
@article{yang2017rotation,
  title     = {Rotation Invariants of Vector Fields from Orthogonal Moments},
  author    = {Bo Yang and Jitka Kostkova and Jan Flusser and Tomas Suk and Roxana Bujack},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-17-26797.pdf},
  doi       = {10.1016/j.patcog.2017.09.004},
  issn      = {0031-3203},
  year      = {2017},
  date      = {2017-09-11},
  journal   = {Pattern Recognition},
  number    = {Supplement C},
  pages     = {110--121},
  abstract  = {Vector field images are a type of new multidimensional data that appear in many engineering areas. Although the vector fields can be visualized as images, they differ from graylevel and color images in several aspects. To analyze them, special methods and algorithms must be originally developed or substantially adapted from the traditional image processing area. In this paper, we propose a method for the description and matching of vector field patterns under an unknown rotation of the field. Rotation of a vector field is so-called total rotation, where the action is applied not only on the spatial coordinates but also on the field values. Invariants of vector fields with respect to total rotation constructed from orthogonal Gaussian--Hermite moments and Zernike moments are introduced. Their numerical stability is shown to be better than that of the invariants published so far. We demonstrate their usefulness in a real world template matching application of rotated vector fields.},
  note      = {LA-UR-17-26797, Under a Creative Commons license: http://creativecommons.org/licenses/by-nc-nd/4.0/},
  keywords  = {Numerical stability, visualization},
  pubstate  = {published},
  tppubtype = {article}
}
Patchett, John; Nouanesengsy, Boonthanome; Ahrens, James; Lang, Michael; Rogers, David; Green, Jennifer; Samsel, Francesca; Cone, Giovanni; Hagen, Hans
Delivery of In Situ Capability to End Users Proceedings Article
In: 2017, (USDOE National Nuclear Security Administration (NNSA), LA-UR-17-26655).
Abstract | Links | BibTeX | Tags: In Situ Visualization Analysis; ParaView
@inproceedings{info:lanl-repo/lareport/LA-UR-17-26655,
  title     = {Delivery of In Situ Capability to End Users},
  author    = {John Patchett and Boonthanome Nouanesengsy and James Ahrens and Michael Lang and David Rogers and Jennifer Green and Francesca Samsel and Giovanni Cone and Hans Hagen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-17-26655.pdf},
  year      = {2017},
  date      = {2017-08-01},
  abstract  = {Delivery of new technologies to deeply rooted end user workflows can be difficult. We describe a delivery process of a generalized in situ data analysis and visualization capability to both end users and simulation code developers. The process was driven and funded by management which helped ensure success. End users were chosen to exemplify the capability. Delivery is defined to include full integration into the simulation. This includes the simulation’s regular build and testing systems, in addition to institutional support in the supercomputing environments. This paper describes a robust and successful delivery of the in situ capability to our end users.},
  note      = {USDOE National Nuclear Security Administration (NNSA), LA-UR-17-26655},
  keywords  = {In Situ Visualization Analysis; ParaView},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Berres, Anne; Adhinarayanan, Vignesh; Turton, Terece; Feng, Wu; Rogers, David
A Pipeline for Large Data Processing Using Regular Sampling for Unstructured Grids Proceedings Article
In: 2017, (LA-UR-17-23903).
Abstract | Links | BibTeX | Tags: large-scale data, visualization
@inproceedings{info:lanl-repo/lareport/LA-UR-17-23903,
  title     = {A Pipeline for Large Data Processing Using Regular Sampling for Unstructured Grids},
  author    = {Anne Berres and Vignesh Adhinarayanan and Terece Turton and Wu Feng and David Rogers},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-17-23903.pdf},
  year      = {2017},
  date      = {2017-05-12},
  abstract  = {Large simulation data requires a lot of time and computational resources to compute, store, analyze, visualize, and run user studies. Today, the largest cost of a supercomputer is not hardware but maintenance, in particular energy consumption. Our goal is to balance energy consumption and cognitive value of visualizations of resulting data. This requires us to go through the entire processing pipeline, from simulation to user studies. To reduce the amount of resources, data can be sampled or compressed. While this adds more computation time, the computational overhead is negligible compared to the simulation time. We built a processing pipeline at the example of regular sampling. The reasons for this choice are two-fold: using a simple example reduces unnecessary complexity as we know what to expect from the results. Furthermore, it provides a good baseline for future, more elaborate sampling methods. We measured time and energy for each test we did, and we conducted user studies in Amazon Mechanical Turk (AMT) for a range of different results we produced through sampling.},
  note      = {LA-UR-17-23903},
  keywords  = {large-scale data, visualization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Samsel, Francesca; Patchett, John; Rogers, David; Tsai, Karen
Employing Color Theory to Visualize Volume-rendered Multivariate Ensembles of Asteroid Impact Simulations Proceedings Article
In: Proceedings of the 2017 CHI Conference Extended Abstracts on Human Factors in Computing Systems, pp. 1126-1134, ACM, 2017, ISBN: 978-1-4503-4656-6, (LA-UR-17-20419).
Abstract | Links | BibTeX | Tags: colormaps, ensemble visualization, scientific visualization, visualization design, volume rendering
@inproceedings{LAPR-2017-027464,
  title     = {Employing Color Theory to Visualize Volume-rendered Multivariate Ensembles of Asteroid Impact Simulations},
  author    = {Francesca Samsel and John Patchett and David Rogers and Karen Tsai},
  url       = {http://doi.acm.org/10.1145/3027063.3053337},
  doi       = {10.1145/3027063.3053337},
  isbn      = {978-1-4503-4656-6},
  year      = {2017},
  date      = {2017-05-06},
  booktitle = {Proceedings of the 2017 CHI Conference Extended Abstracts on Human Factors in Computing Systems},
  pages     = {1126--1134},
  publisher = {ACM},
  series    = {CHI EA '17},
  abstract  = {We describe explorations and innovations developed to help scientists understand an ensemble of large scale simulations of asteroid impacts in the ocean. The simulations were run to help scientists determine the characteristics of asteroids that NASA should track, so that communities at risk from impact can be given advanced notice. Of relevance to the CHI community are 1) hands-on workflow issues specific to exploring ensembles of large scientific data, 2) innovations in exploring such data ensembles with color, and 3) examples of multidisciplinary collaboration.},
  note      = {LA-UR-17-20419},
  keywords  = {colormaps, ensemble visualization, scientific visualization, visualization design, volume rendering},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Patchett, John; Gisler, Galen
Deep Water Impact Ensemble Data Set Technical Report
2017, (LA-UR-17-21595).
Abstract | Links | BibTeX | Tags: asteroid, ensemble data set, In situ analysis, ParaView
@techreport{Patchett2017,
  title     = {Deep Water Impact Ensemble Data Set},
  author    = {John Patchett and Galen Gisler},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/DeepWaterImpactEnsembleDataSet_Revision1.pdf},
  year      = {2017},
  date      = {2017-05-02},
  abstract  = {This ensemble data set represents the study of asteroid impacts in deep ocean water. NASA’s Planetary Defense Coordination Office [1] is keenly interested to know the lower size limit of dangerous asteroids, so as to focus resources on finding all larger objects that potentially threaten the earth. Since most of the planet’s surface is water, that is where asteroids will most likely impact. This observation has generated a serious debate over the last two decades on just how dangerous impact-induced waves or tsunamis are to populated shorelines.},
  note      = {LA-UR-17-21595},
  keywords  = {asteroid, ensemble data set, In situ analysis, ParaView},
  pubstate  = {published},
  tppubtype = {techreport}
}
Adhinarayanan, Vignesh; Feng, Wu-chun; Rogers, David; Ahrens, James; Pakin, Scott
Characterizing and Modeling Power and Energy for Extreme-Scale In-Situ Visualization Proceedings Article
In: 2017 IEEE International Parallel and Distributed Processing Symposium (IPDPS), pp. 978-987, 2017, (LA-UR-16-22435).
Abstract | Links | BibTeX | Tags: energy efficiency, in-situ visualization
@inproceedings{7967188,
  title     = {Characterizing and Modeling Power and Energy for Extreme-Scale In-Situ Visualization},
  author    = {Vignesh Adhinarayanan and Wu-chun Feng and David Rogers and James Ahrens and Scott Pakin},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/CharacterizingandModelingPowerandEnergyforExtreme-ScaleIn-SituVisualization.pdf},
  doi       = {10.1109/IPDPS.2017.113},
  year      = {2017},
  date      = {2017-05-01},
  booktitle = {2017 IEEE International Parallel and Distributed Processing Symposium (IPDPS)},
  pages     = {978--987},
  abstract  = {Plans for exascale computing have identified power and energy as looming problems for simulations running at that scale. In particular, writing to disk all the data generated by these simulations is becoming prohibitively expensive due to the energy consumption of the supercomputer while it idles waiting for data to be written to permanent storage. In addition, the power cost of data movement is also steadily increasing. A solution to this problem is to write only a small fraction of the data generated while still maintaining the cognitive fidelity of the visualization. With domain scientists increasingly amenable towards adopting an in-situ framework that can identify and extract valuable data from extremely large simulation results and write them to permanent storage as compact images, a large-scale simulation will commit to disk a reduced dataset of data extracts that will be much smaller than the raw results, resulting in a savings in both power and energy. The goal of this paper is two-fold: (i) to understand the role of in-situ techniques in combating power and energy issues of extreme-scale visualization and (ii) to create a model for performance, power, energy, and storage to facilitate what-if analysis. Our experiments on a specially instrumented, dedicated 150-node cluster show that while it is difficult to achieve power savings in practice using in-situ techniques, applications can achieve significant energy savings due to shorter write times for in-situ visualization. We present a characterization of power and energy for in-situ visualization; an application-aware, architecture-specific methodology for modeling and analysis of such in-situ workflows; and results that uncover indirect power savings in visualization workflows for high-performance computing (HPC).},
  note      = {LA-UR-16-22435},
  keywords  = {energy efficiency, in-situ visualization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Dutta, Soumya; Woodring, Jon; Shen, Han-Wei; Chen, Jen-Ping; Ahrens, James
Homogeneity guided probabilistic data summaries for analysis and visualization of large-scale data sets Proceedings Article
In: 2017 IEEE Pacific Visualization Symposium (PacificVis), pp. 111-120, 2017, ISSN: 2165-8773, (LA-UR-18-27370).
Abstract | Links | BibTeX | Tags: data visualization, picture/image generation, statistical computing
@inproceedings{8031585,
  title     = {Homogeneity guided probabilistic data summaries for analysis and visualization of large-scale data sets},
  author    = {Soumya Dutta and Jon Woodring and Han-Wei Shen and Jen-Ping Chen and James Ahrens},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2018/08/la-ur_18-27370.pdf},
  doi       = {10.1109/PACIFICVIS.2017.8031585},
  issn      = {2165-8773},
  year      = {2017},
  date      = {2017-04-01},
  booktitle = {2017 IEEE Pacific Visualization Symposium (PacificVis)},
  pages     = {111--120},
  abstract  = {High-resolution simulation data sets provide plethora of information, which needs to be explored by application scientists to gain enhanced understanding about various phenomena. Visual-analytics techniques using raw data sets are often expensive due to the data sets' extreme sizes. But, interactive analysis and visualization is crucial for big data analytics, because scientists can then focus on the important data and make critical decisions quickly. To assist efficient exploration and visualization, we propose a new region-based statistical data summarization scheme. Our method is superior in quality, as compared to the existing statistical summarization techniques, with a more compact representation, reducing the overall storage cost. The quantitative and visual efficacy of our proposed method is demonstrated using several data sets along with an in situ application study for an extreme-scale flow simulation.},
  note      = {LA-UR-18-27370},
  keywords  = {data visualization, picture/image generation, statistical computing},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
}
Banesh, Divya; Schoonover, Joseph; Ahrens, James; Hamann, Bernd
Extracting, Visualizing and Tracking Mesoscale Ocean Eddies in Two-dimensional Image Sequences Using Contours and Moments Proceedings Article
In: 2017, (Workshop on Visualisation in Environmental Sciences (EnvirVis), LA-UR-17-21623).
Abstract | Links | BibTeX | Tags: contours, moments, visualization
@inproceedings{Authors2017,
  title     = {Extracting, Visualizing and Tracking Mesoscale Ocean Eddies in Two-dimensional Image Sequences Using Contours and Moments},
  author    = {Divya Banesh and Joseph Schoonover and James Ahrens and Bernd Hamann},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/ExtractingVisualizingandTrackingMesoscaleOceanEddiesinTwo-dimensionalImageSequencesUsingContoursandMoments.pdf},
  year      = {2017},
  date      = {2017-03-31},
  abstract  = {We introduce a system for the extraction and tracking of mesoscale eddies captured in massive global ocean simulations. The major strength and contribution of our system is its design, which is based on two-dimensional image data processing. The Cinema database [CD] makes possible the generation and storage of two-dimensional image data taken in-situ, i.e., the creation of images via a virtual camera generating images during the ongoing simulation. The problem of eddy extraction and tracking is simplified by our approach to the problem of finding, matching and tracking eddies in two-dimensional images, thus eliminating the task of processing the original massive three-dimensional data set. Our system can be used on a simple desktop computer and provides an intuitive interface allowing a scientist to perform an eddy analysis for global ocean data in real-time. We demonstrate the effectiveness of our implementation for a specific simulated data set.},
  note      = {Workshop on Visualisation in Environmental Sciences (EnvirVis), LA-UR-17-21623},
  keywords  = {contours, moments, visualization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
}
Samsel, Francesca; Turton, Terece; Wolfram, Phillip; Bujack, Roxana
Intuitive Colormaps for Environmental Visualization Proceedings Article
In: Rink, Karsten; Middel, Ariane; Zeckzer, Dirk; Bujack, Roxana (Ed.): Workshop on Visualisation in Environmental Sciences (EnvirVis), The Eurographics Association, 2017, ISBN: 978-3-03868-040-6, (LA-UR-17-22224).
Abstract | Links | BibTeX | Tags: colormaps, environmental sciences
@inproceedings{info:lanl-repo/lareport/LA-UR-17-22224,
  title        = {Intuitive Colormaps for Environmental Visualization},
  author       = {Francesca Samsel and Terece Turton and Phillip Wolfram and Roxana Bujack},
  editor       = {Karsten Rink and Ariane Middel and Dirk Zeckzer and Roxana Bujack},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2017/08/IntuitiveColormapsforEnvironmentalVisualization.pdf},
  doi          = {10.2312/envirvis.20171105},
  isbn         = {978-3-03868-040-6},
  year         = {2017},
  date         = {2017-03-16},
  booktitle    = {Workshop on Visualisation in Environmental Sciences (EnvirVis)},
  publisher    = {The Eurographics Association},
  abstract     = {Visualizations benefit from the use of intuitive colors, enabling an observer to make use of more automatic, subconscious channels. In this paper, we apply the concept of intuitive color to the generation of thematic colormaps for the environmental sciences. In particular, we provide custom sets of colormaps for water, atmosphere, land, and vegetation. These have been integrated into the online tool: ColorMoves: The Environment to enable the environmental scientist to tailor them precisely to the data and tasks in a simple drag-and-drop workflow.},
  howpublished = {EnvirVis ; 2017-06-12 - 2017-06-13 ; Barcelona, Spain},
  note         = {LA-UR-17-22224},
  keywords     = {colormaps, environmental sciences},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Patchett, John; Nouanesengsy, Boonthanome; Gisler, Galen; Ahrens, James; Hagen, Hans
In Situ and Post Processing Workflows for Asteroid Ablation Studies Proceedings Article
In: Kozlikova, Barbora; Schreck, Tobias; Wischgoll, Thomas (Ed.): EuroVis 2017 - Short Papers, The Eurographics Association, 2017, ISBN: 978-3-03868-043-7, (LA-UR-17-22699).
Abstract | Links | BibTeX | Tags: asteroid, in situ
@inproceedings{eurovisshort.20171134,
  title     = {In Situ and Post Processing Workflows for Asteroid Ablation Studies},
  author    = {John Patchett and Boonthanome Nouanesengsy and Galen Gisler and James Ahrens and Hans Hagen},
  editor    = {Barbora Kozlikova and Tobias Schreck and Thomas Wischgoll},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-17-22699.pdf},
  doi       = {10.2312/eurovisshort.20171134},
  isbn      = {978-3-03868-043-7},
  year      = {2017},
  date      = {2017-01-01},
  booktitle = {EuroVis 2017 - Short Papers},
  publisher = {The Eurographics Association},
  abstract  = {Simulation scientists need to make decisions about what and how much output to produce. They must balance their ability to efficiently ingest the analysis with their ability to get more analysis. We study this balance as a tradeoff between flexibility of saved data products and accessibility of saved data products. One end of the spectrum is raw data that comes directly from the simulation, making it highly flexible, but inaccessible due to its size and format. The other end of the spectrum is highly processed and comparatively small data, often in the form of imagery or single scalar values. This data is typically highly accessible, needing no special equipment or software, but lacks flexibility for deeper analysis than what is presented. We lay out a user driven model that considers the scientists' output needs in regards to flexibility and accessibility. This model allows us to analyze a real-world example of a large simulation lasting months of wall clock time on thousands of processing cores. Though the ensemble of simulation's original intent was to study asteroid generated tsunamis, the simulations are now being used beyond that scope to study the asteroid ablation as it moves through the atmosphere. With increasingly large supercomputers, designing workflows that support an intentional and understood balance of flexibility and accessibility is necessary. In this paper, we present a new strategy developed from a user driven perspective to support the collaborative capability between simulation developers, designers, users and analysts to effectively support science by wisely using both computer and human time.},
  note      = {LA-UR-17-22699},
  keywords  = {asteroid, in situ},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Hamilton, Stephen; Burns, Randal; Meneveau, Charles; Johnson, Perry; Lindstrom, Peter; Patchett, John; Szalay, Alexander S.
Extreme Event Analysis in Next Generation Simulation Architectures Proceedings Article
In: Kunkel, Julian M.; Yokota, Rio; Balaji, Pavan; Keyes, David (Ed.): High Performance Computing: 32nd International Conference, ISC High Performance 2017, Frankfurt, Germany, June 18–22, 2017, Proceedings, pp. 277–293, Springer International Publishing, Cham, 2017, ISBN: 978-3-319-58667-0.
Abstract | Links | BibTeX | Tags: in situ, visualization
@inproceedings{hamilton2017extreme,
  title        = {Extreme Event Analysis in Next Generation Simulation Architectures},
  author       = {Stephen Hamilton and Randal Burns and Charles Meneveau and Perry Johnson and Peter Lindstrom and John Patchett and Alexander S. Szalay},
  editor       = {Julian M. Kunkel and Rio Yokota and Pavan Balaji and David Keyes},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2017/08/ExtremeEventAnalysisinNextGenerationSimulationArchitectures.pdf},
  doi          = {10.1007/978-3-319-58667-0_15},
  isbn         = {978-3-319-58667-0},
  year         = {2017},
  date         = {2017-01-01},
  booktitle    = {High Performance Computing: 32nd International Conference, ISC High Performance 2017, Frankfurt, Germany, June 18--22, 2017, Proceedings},
  pages        = {277--293},
  publisher    = {Springer International Publishing},
  organization = {Springer, Cham},
  abstract     = {Numerical simulations present challenges because they generate petabyte-scale data that must be extracted and reduced during the simulation. We demonstrate a seamless integration of feature extraction for a simulation of turbulent fluid dynamics. The simulation produces on the order of 6 terabytes per timestep. In order to analyze and store this data, we extract velocity data from a dilated volume of the strong vortical regions and also store a lossy compressed representation of the data. Both reduce data by one or more orders of magnitude. We extract data from user checkpoints in transit while they reside on temporary burst buffer SSD stores. In this way, analysis and compression algorithms are designed to meet specific time constraints so they do not interfere with simulation computations. Our results demonstrate that we can perform feature extraction on a world-class direct numerical simulation of turbulence while it is running and gather meaningful scientific data for archival and post analysis.},
  keywords     = {in situ, visualization},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Berres, Anne; Turton, Terece; Rogers, David; Ahrens, James; Petersen, Mark
Video Compression for Ocean Simulation Image Databases Proceedings Article
In: Rink, Karsten; Middel, Ariane; Zeckzer, Dirk; Bujack, Roxana (Ed.): Workshop on Visualization in Environmental Sciences (EnvirVis), The Eurographics Association, 2017, ISBN: 978-3-03868-040-6, (LA-UR-17-21590).
Abstract | Links | BibTeX | Tags: compression, visualization
@inproceedings{info:lanl-repo/lareport/LA-UR-17-21590,
  author       = {Anne Berres and Terece Turton and David Rogers and James Ahrens and Mark Petersen},
  editor       = {Karsten Rink and Ariane Middel and Dirk Zeckzer and Roxana Bujack},
  title        = {Video Compression for Ocean Simulation Image Databases},
  booktitle    = {Workshop on Visualization in Environmental Sciences (EnvirVis)},
  publisher    = {The Eurographics Association},
  year         = {2017},
  date         = {2017-01-01},
  doi          = {10.2312/envirvis.20171104},
  isbn         = {978-3-03868-040-6},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2017/08/VideoCompressionforOceanSimulationImageDatabases.pdf},
  abstract     = {Climate research requires monitoring a large range of spatial and temporal scales to understand the climate system and potential future impacts. Climate simulations are now run with very high resolution (1–10 km gridcells) ocean, sea ice, and atmosphere components, and can easily produce petabytes of output. This overloads storage systems and hinders visualization and analysis. Image databases can decrease storage sizes from petabytes of simulation output down to several hundred gigabytes of images.
In this paper, we introduce video compression as a method to further decrease database sizes by 2-4 orders of magnitude. We compare compression and access speeds, compressed sizes, and compression quality over a range of settings. Quality is assessed through image quality metrics and expert feedback. Overall, we were able to show that video compression techniques provide an efficient means of storing image databases at a shareable size, while preserving image quality. This enables the wise use of available disk space, so scientists can more easily study the physical features of interest.},
  howpublished = {Visualization in Environmental Sciences 2017 ; 2017-06-12 - 2017-06-13 ; Barcelona, Spain},
  note         = {LA-UR-17-21590},
  keywords     = {compression, visualization},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
In this paper, we introduce video compression as a method to further decrease database sizes by 2-4 orders of magnitude. We compare compression and access speeds, compressed sizes, and compression quality over a range of settings. Quality is assessed through image quality metrics and expert feedback. Overall, we were able to show that video compression techniques provide an efficient means of storing image databases at a shareable size, while preserving image quality. This enables the wise use of available disk space, so scientists can more easily study the physical features of interest.
Ware, Colin; Turton, Terece; Samsel, Francesca; Bujack, Roxana; Rogers, David
Evaluating the Perceptual Uniformity of Color Sequences for Feature Discrimination Proceedings Article
In: Lawonn, Kai; Smit, Noeska; Cunningham, Douglas (Ed.): EuroVis Workshop on Reproducibility, Verification, and Validation in Visualization (EuroRV3), The Eurographics Association, 2017, ISBN: 978-3-03868-041-3, (LA-UR-17-24206).
Abstract | Links | BibTeX | Tags: colormaps, visualization
@inproceedings{eurorv3.20171107,
title = {Evaluating the Perceptual Uniformity of Color Sequences for Feature Discrimination},
author = {Colin Ware and Terece Turton and Francesca Samsel and Roxana Bujack and David Rogers},
editor = {Kai Lawonn and Noeska Smit and Douglas Cunningham},
url = {https://diglib.eg.org/handle/10.2312/eurorv320171107},
doi = {10.2312/eurorv3.20171107},
isbn = {978-3-03868-041-3},
year = {2017},
date = {2017-01-01},
booktitle = {EuroVis Workshop on Reproducibility, Verification, and Validation in Visualization (EuroRV3)},
publisher = {The Eurographics Association},
abstract = {Probably the most common method for visualizing univariate data maps is through pseudocoloring and one of the most commonly cited requirements of a good colormap is that it be perceptually uniform. This means that differences between adjacent colors in the sequence be equally distinct. The practical value of uniformity is for features in the data to be equally distinctive no matter where they lie in the colormap, but there are reasons for thinking that uniformity in terms of feature detection may not be achieved by current methods which are based on the use of uniform color spaces. In this paper we provide a new method for directly evaluating colormaps in terms of their capacity for feature resolution. We apply the method in a study using Amazon Mechanical Turk to evaluate seven colormaps. Among other findings the results show that two new double ended sequences have the highest discriminative power and good uniformity. Ways in which the technique can be applied include the design of colormaps for uniformity, and a method for evaluating colormaps through feature discrimination curves for differently sized features.},
note = {LA-UR-17-24206},
keywords = {colormaps, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Turton, Terece; Ware, Colin; Samsel, Francesca; Rogers, David
A Crowdsourced Approach to Colormap Assessment Proceedings Article
In: Lawonn, Kai; Smit, Noeska; Cunningham, Douglas (Ed.): EuroVis Workshop on Reproducibility, Verification, and Validation in Visualization (EuroRV3), The Eurographics Association, 2017, ISBN: 978-3-03868-041-3.
Abstract | Links | BibTeX | Tags: Colormapping, user interfaces
@inproceedings{Turton2017crowdsourced,
  author    = {Terece Turton and Colin Ware and Francesca Samsel and David Rogers},
  editor    = {Kai Lawonn and Noeska Smit and Douglas Cunningham},
  title     = {A Crowdsourced Approach to Colormap Assessment},
  booktitle = {EuroVis Workshop on Reproducibility, Verification, and Validation in Visualization (EuroRV3)},
  publisher = {The Eurographics Association},
  year      = {2017},
  date      = {2017-01-01},
  doi       = {10.2312/eurorv3.20171106},
  isbn      = {978-3-03868-041-3},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/ACrowdsourcedApproachtoColormapAssessment.pdf},
  abstract  = {Despite continual research and discussion on the perceptual effects of color in scientific visualization, psychophysical testing is often limited. In-person lab studies can be expensive and time-consuming while results can be difficult to extrapolate from meticulously controlled laboratory conditions to the real world of the visualization user. We draw on lessons learned from the use of crowdsourced participant pools in the behavioral sciences and information visualization to apply a crowdsourced approach to a classic psychophysical experiment assessing the ability of a colormap to impart metric information. We use an online presentation analogous to the color key task from Ware’s 1988 paper, Color Sequences for Univariate Maps, testing colormaps similar to those in the original paper along with contemporary colormap standards and new alternatives in the scientific visualization domain. We explore the issue of potential contamination from color deficient participants and establish that perceptual color research can appropriately leverage a crowdsourced participant pool without significant CVD concerns. The updated version of the Ware color key task also provides a method to assess and compare colormaps.},
  keywords  = {Colormapping, user interfaces},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Turton, Terece; Berres, Anne; Rogers, David; Ahrens, James
ETK: An Evaluation Toolkit for Visualization User Studies Proceedings Article
In: Kozlikova, Barbora; Schreck, Tobias; Wischgoll, Thomas (Ed.): EuroVis 2017 – Short Papers, The Eurographics Association, 2017, ISBN: 978-3-03868-043-7.
Abstract | Links | BibTeX | Tags: User Evaluation
@inproceedings{Turton2017etk,
  author    = {Terece Turton and Anne Berres and David Rogers and James Ahrens},
  editor    = {Barbora Kozlikova and Tobias Schreck and Thomas Wischgoll},
  title     = {ETK: An Evaluation Toolkit for Visualization User Studies},
  booktitle = {EuroVis 2017 – Short Papers},
  publisher = {The Eurographics Association},
  year      = {2017},
  date      = {2017-01-01},
  doi       = {10.2312/eurovisshort.20171131},
  isbn      = {978-3-03868-043-7},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/ETKAnEvaluationToolkitforVisualizationUserStudies.pdf},
  abstract  = {This paper describes the design and features of the Evaluation Toolkit (ETK), a set of JavaScript/HTML/CSS modules leveraging the Qualtrics JavaScript API that can be used to automate image-based perceptual user evaluation studies. Automating the presentation of the images can greatly decrease the time to build and implement an evaluation study while minimizing the length and complexity of a study built within Qualtrics, along with decreasing the possibility of error in image presentation. The ETK modules each focus on automating a specific psychophysical or experimental approach. Because each module is an extension or plug-in to a Qualtrics question, the resultant study can be easily used in a laboratory setting or in a crowdsourced approach. We present the open source repository of ETK with the six modules that currently make up the toolkit and invite the community to explore, utilize, and contribute to the toolkit.},
  keywords  = {User Evaluation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Bujack, Roxana; Flusser, Jan
Flexible Moment Invariant Bases for 2D Scalar and Vector Fields Proceedings Article
In: Proceedings of International Conference in Central Europe on Computer Graphics, Visualization and Computer Vision (WSCG), 2017, (LA-UR-17-20144).
Abstract | Links | BibTeX | Tags: moment invariants, pattern detection, vector fields
@inproceedings{bujack2017flexible,
  author    = {Roxana Bujack and Jan Flusser},
  title     = {Flexible Moment Invariant Bases for 2D Scalar and Vector Fields},
  booktitle = {Proceedings of International Conference in Central Europe on Computer Graphics, Visualization and Computer Vision (WSCG)},
  year      = {2017},
  date      = {2017-01-01},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-17-20144.pdf},
  abstract  = {Complex moments have been successfully applied to pattern detection tasks in two-dimensional real, complex, and vector valued functions.
In this paper, we review the different bases of rotational moment invariants based on the generator approach with complex monomials. We analyze their properties with respect to independence, completeness, and existence and present superior bases that are optimal with respect to all three criteria for both scalar and vector fields.},
  note      = {LA-UR-17-20144},
  keywords  = {moment invariants, pattern detection, vector fields},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
In this paper, we review the different bases of rotational moment invariants based on the generator approach with complex monomials. We analyze their properties with respect to independence, completeness, and existence and present superior bases that are optimal with respect to all three criteria for both scalar and vector fields.
Bujack, Roxana; Turton, Terece; Samsel, Francesca; Ware, Colin; Rogers, David; Ahrens, James
The Good, the Bad, and the Ugly: A Theoretical Framework for the Assessment of Continuous Colormaps Proceedings Article
In: IEEE Visualization, 2017.
Abstract | Links | BibTeX | Tags: color theory, colormaps, linearity
@inproceedings{bujack2017good,
  author    = {Roxana Bujack and Terece Turton and Francesca Samsel and Colin Ware and David Rogers and James Ahrens},
  title     = {The Good, the Bad, and the Ugly: A Theoretical Framework for the Assessment of Continuous Colormaps},
  booktitle = {IEEE Visualization},
  year      = {2017},
  date      = {2017-01-01},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/10/TheGoodtheBadandtheUgly.pdf},
  abstract  = {A myriad of design rules for what constitutes a “good” colormap can be found in the literature. Some common rules include order, uniformity, and high discriminative power. However, the meaning of many of these terms is often ambiguous or open to interpretation. At times, different authors may use the same term to describe different concepts or the same rule is described by varying nomenclature. These ambiguities stand in the way of collaborative work, the design of experiments to assess the characteristics of colormaps, and automated colormap generation.
In this paper, we review current and historical guidelines for colormap design. We propose a specified taxonomy and provide unambiguous mathematical definitions for the most common design rules.},
  keywords  = {color theory, colormaps, linearity},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
In this paper, we review current and historical guidelines for colormap design. We propose a specified taxonomy and provide unambiguous mathematical definitions for the most common design rules.
Berres, Anne; Turton, Terece; Petersen, Mark; Rogers, David; Ahrens, James
Video Compression for Ocean Simulation Image Databases Proceedings Article
In: Rink, Karsten; Middel, Ariane; Zeckzer, Dirk; Bujack, Roxana (Ed.): Workshop on Visualisation in Environmental Sciences (EnvirVis), The Eurographics Association, 2017, ISBN: 978-3-03868-040-6.
Abstract | Links | BibTeX | Tags:
@inproceedings{Berres2017-VideoCompression,
  author    = {Anne Berres and Terece Turton and Mark Petersen and David Rogers and James Ahrens},
  editor    = {Karsten Rink and Ariane Middel and Dirk Zeckzer and Roxana Bujack},
  title     = {Video Compression for Ocean Simulation Image Databases},
  booktitle = {Workshop on Visualisation in Environmental Sciences (EnvirVis)},
  publisher = {The Eurographics Association},
  year      = {2017},
  date      = {2017-01-01},
  doi       = {10.2312/envirvis.20171104},
  isbn      = {978-3-03868-040-6},
  url       = {http://ecxproject.org/wp-content/uploads/sites/18/2018/04/envirvis17-video-compression.pdf},
  abstract  = {Climate research requires monitoring a large range of spatial and temporal scales to understand the climate system and potential future impacts. Climate simulations are now run with very high resolution (1--10 km gridcells) ocean, sea ice, and atmosphere components, and can easily produce petabytes of output. This overloads storage systems and hinders visualization and analysis. Image databases can decrease storage sizes from petabytes of simulation output down to several hundred gigabytes of images.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Yang, Bo; Kostkova, Jitka; Flusser, Jan; Suk, Tomas; Bujack, Roxana
Recognition of Patterns in Vector Fields by Gaussian-Hermite Invariants Conference
poster at 2017 IEEE International Conference on Image Processing, ICIP, Beijing, China, 2017.
@conference{yang2017recognition,
  author    = {Bo Yang and Jitka Kostkova and Jan Flusser and Tomas Suk and Roxana Bujack},
  title     = {Recognition of Patterns in Vector Fields by Gaussian-Hermite Invariants},
  booktitle = {poster at 2017 IEEE International Conference on Image Processing, ICIP, Beijing, China},
  year      = {2017},
  date      = {2017-01-01},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/RecognitionofPatternsinVectorFieldsbyGaussian-HermiteInvariants.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Bujack, Roxana; Hagen, Hans
Moment Invariants for Multi-Dimensional Data Proceedings Article
In: Ozerslan, Evren; Schultz, Thomas; Hotz, Ingrid (Ed.): Modelling, Analysis, and Visualization of Anisotropy, Springer Basel AG, 2017, ISBN: 978-3-319-61358-1.
@inproceedings{bujack2017tensor,
title = {Moment Invariants for Multi-Dimensional Data},
author = {Roxana Bujack and Hans Hagen},
editor = {Evren Ozerslan and Thomas Schultz and Ingrid Hotz},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/MomentInvariantsforMulti-DimensionalData.pdf},
isbn = {978-3-319-61358-1},
year = {2017},
date = {2017-01-01},
booktitle = {Modelling, Analysis, and Visualization of Anisotropy},
publisher = {Springer Basel AG},
series = {Mathematics and Visualization},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Bujack, Roxana; Flusser, Jan
Flexible Moment Invariant Bases for 2D Scalar and Vector Fields Proceedings Article
In: Proceedings of International Conference in Central Europe on Computer Graphics, Visualization and Computer Vision (WSCG), pp. 11–20, 2017.
@inproceedings{bujack2017flexibleb,
title = {Flexible Moment Invariant Bases for 2D Scalar and Vector Fields},
author = {Roxana Bujack and Jan Flusser},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/FlexibleMomentInvariantBasesfor2DScalarandVectorFields.pdf},
year = {2017},
date = {2017-01-01},
booktitle = {Proceedings of International Conference in Central Europe on Computer Graphics, Visualization and Computer Vision (WSCG)},
pages = {11--20},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Bei; Bujack, Roxana; Paul, Harsh; Hagen, Hans
Interpreting Galilean Invariant Vector Field Analysis via Extended Robustness: Extended Abstract Proceedings Article
In: Topology-Based Methods in Visualization (TopoInVis 2017) Tokyo, Japan, 2017.
@inproceedings{wang2017interpreting,
  author    = {Bei Wang and Roxana Bujack and Harsh Paul and Hans Hagen},
  title     = {Interpreting Galilean Invariant Vector Field Analysis via Extended Robustness: Extended Abstract},
  booktitle = {Topology-Based Methods in Visualization (TopoInVis 2017) Tokyo, Japan},
  year      = {2017},
  date      = {2017-01-01},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/InterpretingGalileanInvariantVectorFieldAnalysisviaExtendedRobustness-ExtendedAbstract.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2016
Ahrens, James Paul
Towards a Scalable, Platform Independent, User-Friendly Analysis Framework for Scientific and Information Oriented Applications Presentation
01.11.2016, (Chesapeake Large-Scale Analytics Conference - CLSAC 2016, LA-UR-16-28491).
Abstract | Links | BibTeX | Tags: large-scale data, ParaView, VTK
@misc{Ahrens_2016,
  author    = {James Paul Ahrens},
  title     = {Towards a Scalable, Platform Independent, User-Friendly Analysis Framework for Scientific and Information Oriented Applications},
  year      = {2016},
  date      = {2016-11-01},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-16-28491.pdf},
  abstract  = {Options for extreme scale data analysis are often presented as a stark contrast: write large files to disk for interactive exploratory analysis, or perform in situ analysis to save detailed data about phenomena that a user knows about in advance. We describe a novel framework that offers a middle ground - a highly interactive approach that promotes exploration of extracts from source data streams that significantly reduces data movement and storage. This framework is built upon a collection of scalable, platform independent, open-source packages (vtk and ParaView) that have been developed of the past two decades. In this talk, I will describe these frameworks, connections to information oriented streaming approaches, and highlight applications to real-world analysis challenges.},
  note      = {Chesapeake Large-Scale Analytics Conference - CLSAC 2016, LA-UR-16-28491},
  keywords  = {large-scale data, ParaView, VTK},
  pubstate  = {published},
  tppubtype = {presentation}
}
Carr, Hamish; Weber, Gunther H.; Sewell, Christopher; Ahrens, James
Parallel Peak Pruning for Scalable SMP Contour Tree Computation Proceedings Article
In: 2016 IEEE 6th Symposium on Large Data Analysis and Visualization (LDAV) , IEEE 2016, (LA-UR-16-24454).
Abstract | Links | BibTeX | Tags: contour tree
@inproceedings{Carr2016,
title = {Parallel Peak Pruning for Scalable SMP Contour Tree Computation},
author = {Hamish Carr and Gunther H. Weber and Christopher Sewell and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/11/ParallelPeakPruningForScalableSMPContourTreeComputation2.pdf},
year = {2016},
date = {2016-10-23},
booktitle = {2016 IEEE 6th Symposium on Large Data Analysis and Visualization (LDAV)},
organization = {IEEE},
abstract = {As data sets grow to exascale, automated data analysis and visualisation are increasingly important, to intermediate human understanding and to reduce demands on disk storage via in situ analysis. Trends in architecture of high performance computing systems necessitate analysis algorithms to make effective use of combinations of massively multicore and distributed systems. One of the principal analytic tools is the contour tree, which analyses relationships between contours to identify features of more than local importance. Unfortunately, the predominant algorithms for computing the contour tree are explicitly serial, and founded on serial metaphors, which has limited the scalability of this form of analysis. While there is some work on distributed contour tree computation, and separately on hybrid GPU-CPU computation, there is no efficient algorithm with strong formal guarantees on performance allied with fast practical performance. We report the first shared SMP algorithm for fully parallel contour tree computation, with formal guarantees of O(lg n lg t) parallel steps and O(n lg n) work, and implementations with up to 10× parallel speed up in OpenMP and up to 50× speed up in NVIDIA Thrust.},
note = {LA-UR-16-24454},
keywords = {contour tree},
pubstate = {published},
tppubtype = {inproceedings}
}
Patchett, John; Nouanesengsy, Boonthanome; Fasel, Patricia; Ahrens, James
2016 CSSE L3 Milestone: Deliver In Situ to XTD End Users Technical Report
2016, (LA-UR-16-26987).
Abstract | Links | BibTeX | Tags: in-situ, visualization
@techreport{Patchett2016,
title = {2016 CSSE L3 Milestone: Deliver In Situ to XTD End Users},
author = {John Patchett and Boonthanome Nouanesengsy and Patricia Fasel and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/10/DeliverInSituToXTDEndUsers.pdf},
institution = {Los Alamos National Laboratory},
year = {2016},
date = {2016-09-13},
abstract = {This report summarizes the activities in FY16 toward satisfying the CSSE 2016 L3 milestone to deliver in situ to XTD end users of EAP codes. The Milestone was accomplished with ongoing work to ensure the capability is maintained and developed. Two XTD end users used the in situ capability in Rage. A production ParaView capability was created in the HPC and Desktop environment. Two new capabilities were added to ParaView in support of an EAP in situ workflow.
We also worked with various support groups at the lab to deploy a production ParaView in the LANL environment for both desktop and HPC systems. In addition, for this milestone, we moved two VTK based filters from research objects into the production ParaView code to support a variety of standard visualization pipelines for our EAP codes.},
note = {LA-UR-16-26987},
keywords = {in-situ, visualization},
pubstate = {published},
tppubtype = {techreport}
}
We also worked with various support groups at the lab to deploy a production ParaView in the LANL environment for both desktop and HPC systems. . In addition, for this milestone, we moved two VTK based filters from research objects into the production ParaView code to support a variety of standard visualization pipelines for our EAP codes.
Moreland, Kenneth; Sewell, Christopher; Usher, William; Lo, Li-ta; Meredith, Jeremy; Pugmire, David; Kress, James; Schroots, Hendrik; Ma, Kwan-Liu; Childs, Hank; Larsen, Matthew; Chen, Chun-Ming; Maynard, Robert; Geveci, Berk
VTK-m: Accelerating the Visualization Toolkit for Massively Threaded Architectures Proceedings Article
In: pp. 48-58, IEEE Computer Graphics and Applications, 2016, ISSN: 0272-1716, (LA-UR-15-27306).
Abstract | Links | BibTeX | Tags: visualization, VTK-m
@inproceedings{Moreland:2016a,
title = {VTK-m: Accelerating the Visualization Toolkit for Massively Threaded Architectures},
author = {Kenneth Moreland and Christopher Sewell and William Usher and Li-ta Lo and Jeremy Meredith and David Pugmire and James Kress and Hendrik Schroots and Kwan-Liu Ma and Hank Childs and Matthew Larsen and Chun-Ming Chen and Robert Maynard and Berk Geveci},
url = {http://ieeexplore.ieee.org/xpl/articleDetails.jsp?arnumber=7466740},
issn = {0272-1716},
year = {2016},
date = {2016-05-01},
pages = {48--58},
publisher = {IEEE Computer Graphics and Applications},
abstract = {One of the most critical challenges for high-performance computing (HPC) scientific visualization is execution on massively threaded processors. Of the many fundamental changes we are seeing in HPC systems, one of the most profound is a reliance on new processor types optimized for execution bandwidth over latency hiding. Our current production scientific visualization software is not designed for these new types of architectures. To address this issue, the VTK-m framework serves as a container for algorithms, provides flexible data representation, and simplifies the design of visualization algorithms on new and future computer architecture.},
note = {LA-UR-15-27306},
keywords = {visualization, VTK-m},
pubstate = {published},
tppubtype = {inproceedings}
}
Pulido, Jesus; Livescu, Daniel; Burns, Randal; Canada, Curt; Ahrens, James; Hamann, Bernd
Remote Visual Analysis on Large Turbulence Databases at Multiple Scales Presentation
Salishan Conference on High Speed Computing ; 2016-04-25 - 2016-04-29 ; Gleneden Beach, Oregon, United States, 21.04.2016, (LA-UR-16-22778).
Abstract | Links | BibTeX | Tags: large databases, visual analysis
@misc{info:lanl-repo/lareport/LA-UR-16-22778,
  author       = {Jesus Pulido and Daniel Livescu and Randal Burns and Curt Canada and James Ahrens and Bernd Hamann},
  title        = {Remote Visual Analysis on Large Turbulence Databases at Multiple Scales},
  year         = {2016},
  date         = {2016-04-21},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-16-22778.pdf},
  abstract     = {Extremely large datasets are becoming increasingly common in science and engineering, and it is often prohibitive to store an original massive dataset at multiple sites or to transmit over computer networks in its entirety. Regardless, such datasets represented tremendous scientific value for the broader scientific community. It is imperative to deploy effective technologies enabling the remote access to vast data archives for the purpose of having a large pool of scientists harness their value and make new discoveries. Our analysis framework presented here was driven specifically by the needs articulated by scientists from Johns Hopkins University (JHU) and Los Alamos National Laboratory.},
  howpublished = {Salishan Conference on High Speed Computing ; 2016-04-25 - 2016-04-29 ; Gleneden Beach, Oregon, United States},
  note         = {LA-UR-16-22778},
  keywords     = {large databases, visual analysis},
  pubstate     = {published},
  tppubtype    = {presentation}
}
Ware, Colin; Rogers, David; Petersen, Mark; Ahrens, James; Aygar, Erol
Optimizing for Visual Cognition in High Performance Scientific Computing Journal Article
In: Electronic Imaging, vol. 2016, no. 16, pp. 1–9, 2016, ISSN: 2470-1173.
Abstract | Links | BibTeX | Tags: cognitive efficiency, visualization
@article{ware2016optimizing,
title = {Optimizing for Visual Cognition in High Performance Scientific Computing},
author = {Colin Ware and David Rogers and Mark Petersen and James Ahrens and Erol Aygar},
url = {http://www.ingentaconnect.com/content/ist/ei/2016/00002016/00000016/art00041},
doi = {10.2352/ISSN.2470-1173.2016.16.HVEI-130},
issn = {2470-1173},
year = {2016},
date = {2016-02-14},
journal = {Electronic Imaging},
volume = {2016},
number = {16},
pages = {1--9},
publisher = {Society for Imaging Science and Technology},
abstract = {High performance scientific computing is undergoing radical changes as we move to Exascale (1018 FLOPS) and as a consequence products for visualization must increasingly be generated in-situ as opposed to after a model run. This changes both the nature of the data products and the overall cognitive work flow. Currently, data is saved in the form of model dumps, but these are both extremely large and not ideal for visualization. Instead, we need methods for saving model data in ways that are both compact and optimized for visualization. For example, our results show that animated representations are more perceptually efficient than static views even for steady flows, so we need ways of compressing vector field data for animated visualization. Another example, motion parallax is essential to perceive structures in dark matter simulations, so we need ways of saving large particle systems optimized for perception. Turning to the cognitive work flow, when scientists and engineers allocate their time to high performance computer simulations their effort is distributed between pre and post run work. To better understand the tradeoffs we created an analytics game to model the optimization of high performance computer codes simulating ocean dynamics. Visualization is a key part of this process. The results from two analytics game experiments suggest that simple changes can have a large impact on overall cognitive efficiency. Our first experiment showed that study participants continued to look at images for much longer than optimal. A second experiment revealed a large reduction in cognitive efficiency as working memory demands increased. We conclude with recommendations for systems design.},
keywords = {cognitive efficiency, visualization},
pubstate = {published},
tppubtype = {article}
}
Samsel, Francesca; Klassen, Sebastian; Petersen, Mark; Turton, Terece; Abram, Greg; Rogers, David; Ahrens, James
Interactive Colormapping: Enabling Multiple Data Ranges, Detailed Views of Ocean Salinity Proceedings Article
In: Proceedings of the 34th Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems, ACM, San Jose, California, 2016, (LA-UR-15-20105).
Abstract | Links | BibTeX | Tags: climate science, color perception, color theory, colormaps, high-resolution datasets, scientific visualization
@inproceedings{Samsel:2016:CIP:2702613.2702975,
title = {Interactive Colormapping: Enabling Multiple Data Ranges, Detailed Views of Ocean Salinity},
author = {Francesca Samsel and Sebastian Klassen and Mark Petersen and Terece Turton and Greg Abram and David Rogers and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/InteractiveColormapping.pdf},
year = {2016},
date = {2016-01-01},
booktitle = {Proceedings of the 34th Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems},
publisher = {ACM},
address = {San Jose, California},
series = {CHI EA '16},
abstract = {Ocean salinity is a critical component to understanding climate change. Salinity concentrations and temperature drive large ocean currents which in turn drive global weather patterns. Melting ice caps lower salinity at the poles while river deltas bring fresh water into the ocean worldwide. These processes slow ocean currents, changing weather patterns and producing extreme climate events which disproportionally affect those living in poverty. Analysis of salinity presents a unique visualization challenge. Important data are found in narrow data ranges, varying with global location. Changing values of salinity are important in understanding ocean currents, but are difficult to map to colors using traditional tools. Commonly used colormaps may not provide sufficient detail for this data. Current editing tools do not easily enable a scientist to explore the subtleties of salinity. We present a workflow, enabled by an interactive colormap tool that allows a scientist to interactively apply sophisticated colormaps to scalar data. The intuitive and immediate interaction of the scientist with the data is a critical contribution of this work.},
note = {LA-UR-15-20105},
keywords = {climate science, color perception, color theory, colormaps, high-resolution datasets, scientific visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Patchett, John; Samsel, Francesca; Tsai, Karen; Gisler, Galen; Rogers, David; Abram, Greg; Turton, Terece
Visualization and Analysis of Threats from Asteroid Ocean Impacts Proceedings Article
In: 2016, (Winner, Best Scientific Visualization & Data Analytics Showcase; LA-UR-16-26258).
Abstract | Links | BibTeX | Tags: asteroid, visualization and data analysis
@inproceedings{Patchett2016asteroidvis,
title = {Visualization and Analysis of Threats from Asteroid Ocean Impacts},
author = {John Patchett and Francesca Samsel and Karen Tsai and Galen Gisler and David Rogers and Greg Abram and Terece Turton},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/08/VisualizationAndAnalysisOfThreatsFromAsteroidOceanImpacts.pdf},
year = {2016},
date = {2016-01-01},
booktitle = {2016 ACM/IEEE International Conference for High Performance Computing, Networking, Storage, and Analysis (SC)},
abstract = {An asteroid colliding with earth can have grave consequences. An impact in the ocean has complex effects as the kinetic energy of the asteroid is transferred to the water, potentially causing a tsunami or other distant effect. Scientists at Los Alamos National Laboratory are using the xRage simulation code on high performance computing (HPC) systems to understand the range of possible behaviors of an asteroid impacting the ocean. By running ensembles of large scale 3D simulations, scientists can study a set of potential factors for asteroid-generated tsunamis (AGTs) such as angle of impact, asteroid mass and air burst elevation. These studies help scientists understand the consequences of asteroid impacts such as water dispersement into the atmosphere, which can impact the global climate, or tsunami creation, which can place population centers at risk. The results of these simulations will support NASA’s Office of Planetary Defense in deciding how to best track near-Earth objects (NEOs).},
note = {Winner, Best Scientific Visualization & Data Analytics Showcase; LA-UR-16-26258},
keywords = {asteroid, visualization and data analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
Ware, Colin; Bolan, Daniel; Miller, Ricky; Rogers, David; Ahrens, James
Animated Versus Static Views of Steady Flow Patterns Proceedings Article
In: Proceedings of the ACM Symposium on Applied Perception, pp. 77–84, ACM, Anaheim, California, 2016, ISBN: 978-1-4503-4383-1.
Abstract | Links | BibTeX | Tags: animated flow, flow visualization, vector field visualization
@inproceedings{Ware:2016:AVS:2931002.2931012,
  title     = {Animated Versus Static Views of Steady Flow Patterns},
  author    = {Colin Ware and Daniel Bolan and Ricky Miller and David Rogers and James Ahrens},
  year      = {2016},
  date      = {2016-01-01},
  url       = {http://doi.acm.org/10.1145/2931002.2931012},
  doi       = {10.1145/2931002.2931012},
  isbn      = {978-1-4503-4383-1},
  booktitle = {Proceedings of the ACM Symposium on Applied Perception},
  series    = {SAP '16},
  pages     = {77--84},
  publisher = {ACM},
  address   = {Anaheim, California},
  abstract  = {Two experiments were conducted to test the hypothesis that animated representations of vector fields are more effective than common static representations even for steady flow. We compared four flow visualization methods: animated streamlets, animated orthogonal line segments (where short lines were elongated orthogonal to the flow direction but animated in the direction of flow), static equally spaced streamlines, and static arrow grids. The first experiment involved a pattern detection task in which the participant searched for an anomalous flow pattern in a field of similar patterns. The results showed that both the animation methods produced more accurate and faster responses. The second experiment involved mentally tracing an advection path from a central dot in the flow field and marking where the path would cross the boundary of a surrounding circle. For this task the animated streamlets resulted in better performance than the other methods, but the animated orthogonal particles resulted in the worst performance. We conclude with recommendations for the representation of steady flow patterns.},
  keywords  = {animated flow, flow visualization, vector field visualization},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
O'Leary, Patrick; Ahrens, James; Jourdain, Sebastien; Wittenburg, Scott; Rogers, David H; Petersen, Mark
Cinema image-based in situ analysis and visualization of MPAS-ocean simulations Journal Article
In: Parallel Computing, vol. 55, no. SI, pp. 43-48, 2016, ISSN: 0167-8191.
Abstract | Links | BibTeX | Tags: in situ, oceanography simulation and modeling
@article{LAPR-2016-025180,
title = {Cinema image-based in situ analysis and visualization of MPAS-ocean simulations},
author = {Patrick O'Leary and James Ahrens and Sebastien Jourdain and Scott Wittenburg and David H Rogers and Mark Petersen},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/09/Insitumpas-oceanimage-basedvisualization.pdf},
doi = {10.1016/j.parco.2015.10.005},
issn = {0167-8191},
year = {2016},
date = {2016-01-01},
journal = {Parallel Computing},
volume = {55},
number = {SI},
pages = {43--48},
abstract = {Due to power and I/O constraints associated with extreme scale scientific simulations, in situ analysis and visualization will become a critical component to scientific exploration and discovery. Current analysis and visualization options at extreme scale are presented in opposition: write files to disk for interactive, exploratory analysis, or perform in situ analysis to save data products about phenomena that a scientist knows about in advance. In this paper, we demonstrate extreme scale visualization of MPAS-Ocean simulations leveraging a third option based on Cinema, which is a novel framework for highly interactive, image-based in situ analysis and visualization that promotes exploration.},
keywords = {in situ, oceanography simulation and modeling},
pubstate = {published},
tppubtype = {article}
}
Hummel, Mathias; Bujack, Roxana; Joy, Kenneth; Garth, Christoph
Error Estimates for Lagrangian Flow Field Representations Proceedings Article
In: Bertini, Enrico; Elmqvist, Niklas; Wischgoll, Thomas (Ed.): EuroVis 2016 - Short Papers, pp. 7–11, The Eurographics Association, 2016, ISBN: 978-3-03868-014-7.
@inproceedings{hummel2016,
title = {Error Estimates for Lagrangian Flow Field Representations},
author = {Mathias Hummel and Roxana Bujack and Kenneth Joy and Christoph Garth},
editor = {Enrico Bertini and Niklas Elmqvist and Thomas Wischgoll},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/ErrorEstimatesforLagrangianFlowFieldRepresentations.pdf},
doi = {10.2312/eurovisshort.20161153},
isbn = {978-3-03868-014-7},
year = {2016},
date = {2016-01-01},
booktitle = {EuroVis 2016 - Short Papers},
pages = {7--11},
publisher = {The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Chandler, Jennifer; Bujack, Roxana; Joy, Kenneth
Analysis of Error in Interpolation-Based Pathline Tracing Proceedings Article
In: Bertini, Enrico; Elmqvist, Niklas; Wischgoll, Thomas (Ed.): EuroVis 2016 - Short Papers, pp. 1–5, The Eurographics Association, 2016, ISBN: 978-3-03868-014-7.
@inproceedings{chandler2016,
title = {Analysis of Error in Interpolation-Based Pathline Tracing},
author = {Jennifer Chandler and Roxana Bujack and Kenneth Joy},
editor = {Enrico Bertini and Niklas Elmqvist and Thomas Wischgoll},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/AnalysisofErrorinInterpolation-BasedPathlineTracing.pdf},
doi = {10.2312/eurovisshort.20161152},
isbn = {978-3-03868-014-7},
year = {2016},
date = {2016-01-01},
booktitle = {EuroVis 2016 - Short Papers},
pages = {1--5},
publisher = {The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Bujack, Roxana; Hlawitschka, Mario; Joy, Kenneth
Topology-Inspired Galilean Invariant Vector Field Analysis Proceedings Article
In: Proceedings of the IEEE Pacific Visualization Symposium, PacificVis 2016 in Taipei, Taiwan, pp. 72–79, 2016.
@inproceedings{Bujack2016Topology,
  title     = {Topology-Inspired Galilean Invariant Vector Field Analysis},
  author    = {Roxana Bujack and Mario Hlawitschka and Kenneth Joy},
  year      = {2016},
  date      = {2016-01-01},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/Topology-InspiredGalileanInvariantVectorFieldAnalysis.pdf},
  booktitle = {Proceedings of the IEEE Pacific Visualization Symposium, PacificVis 2016 in Taipei, Taiwan},
  pages     = {72--79},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Pulido, Jesus; Livescu, Daniel; Woodring, Jonathan; Ahrens, James; Hamann, Bernd
Survey and analysis of multiresolution methods for turbulence data Journal Article
In: Computers & Fluids, vol. 125, pp. 39 - 58, 2016, ISSN: 0045-7930, (LA-UR-15-20966).
Links | BibTeX | Tags: B-spline wavelet, Curvelet, Surfacelet, turbulence, Wavelet
@article{PULIDO201639b,
title = {Survey and analysis of multiresolution methods for turbulence data},
author = {Jesus Pulido and Daniel Livescu and Jonathan Woodring and James Ahrens and Bernd Hamann},
url = {http://www.sciencedirect.com/science/article/pii/S004579301500362X},
doi = {10.1016/j.compfluid.2015.11.001},
issn = {0045-7930},
year = {2016},
date = {2016-01-01},
journal = {Computers & Fluids},
volume = {125},
pages = {39--58},
note = {LA-UR-15-20966},
keywords = {B-spline wavelet, Curvelet, Surfacelet, turbulence, Wavelet},
pubstate = {published},
tppubtype = {article}
}
2015
Samsel, Francesca
Understanding via Color Presentation
27.10.2015, (IEEE Vis 2015 Color Mapping Panel).
Abstract | Links | BibTeX | Tags: colormaps
@misc{Samsel1027,
  title     = {Understanding via Color},
  author    = {Francesca Samsel},
  year      = {2015},
  date      = {2015-10-27},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/UnderstandingViaColor.pptx
               http://ieeevis.org/year/2015/info/overview-amp-topics/panels},
  abstract  = {In this panel, we highlight optimal solutions for designing and building color maps in visualization applications and presentations. Our panelists represent artists, software engineers, cartographers, color scientists, perceptual psychologists, and visualization researchers who have contributed effective solutions to applying color to data visualization. Each panelist will highlight their perspective as well as tips and tricks for color map solutions. Drawing on perspectives from many disciplines, the panel will identify gaps in our understanding about the use of color in visualization and will identify future research directions.},
  note      = {IEEE Vis 2015 Color Mapping Panel},
  keywords  = {colormaps},
  pubstate  = {published},
  tppubtype = {presentation},
}
Kares, Robert J.
In-Situ Visualization Experiments with ParaView Cinema in RAGE Technical Report
2015, (LA-UR-15-28026).
Abstract | Links | BibTeX | Tags: catalyst, cinema, ParaView
@techreport{Kares2015,
title = {In-Situ Visualization Experiments with ParaView Cinema in RAGE},
author = {Robert J. Kares},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/In-SituVisualizationExperimentsWithParaViewCinemaInRAGE.pdf},
year = {2015},
date = {2015-10-15},
abstract = {In a previous paper Robert Kares described some numerical experiments performed using the ParaView/Catalyst in-situ visualization infrastructure deployed in the Los Alamos RAGE radiation-hydrodynamics code to produce images from a running large scale 3D ICF simulation. One challenge of the in-situ approach apparent in these experiments was the difficulty of choosing parameters like isosurface values for the visualizations to be produced from the running simulation without the benefit of prior knowledge of the simulation results and the resultant cost of recomputing in-situ generated images when parameters are chosen sub-optimally. A proposed method of addressing this difficulty is to simply render multiple images at runtime with a range of possible parameter values to produce a large database of images and to provide the user with a tool for managing the resulting database of imagery. Recently, ParaView/Catalyst has been extended to include such a capability via the so-called Cinema framework. Here Kares describes some initial experiments with the first delivery of Cinema and makes some recommendations for future extensions of Cinema’s capabilities.},
note = {LA-UR-15-28026},
keywords = {catalyst, cinema, ParaView},
pubstate = {published},
tppubtype = {techreport}
}
Adhinarayanan, Vignesh
Performance, Power and Energy of In-situ and Post-processing Visualization: A Case Study in Climate Simulation Presentation
05.10.2015, (LA-UR-15-27749).
Abstract | Links | BibTeX | Tags: energy, in-situ, performance, post-processing, power
@misc{Adhinarayanan2015,
  title     = {Performance, Power and Energy of In-situ and Post-processing Visualization: A Case Study in Climate Simulation},
  author    = {Vignesh Adhinarayanan},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/07/PerformancePowerAndEnergyOfInSituAndPostProcessingVisualization.pdf},
  abstract  = {This presentation summarizes a summer study of the performance, power, and energy trade-offs among traditional post-processing, modern post-processing, and in-situ visualization pipelines. It includes both detailed sub-component level power measurements within a node to gain detailed insights and measurements at scale to understand problems unique to big supercomputers.},
  note      = {LA-UR-15-27749},
  keywords  = {energy, in-situ, performance, post-processing, power},
  pubstate  = {published},
  tppubtype = {presentation},
}
Eatmon, Arnold
Generating Cinema Databases for In Situ Visualization of Ocean Modeling Simulations Presentation
05.10.2015, (LA-UR-15-27748).
Abstract | Links | BibTeX | Tags: cinema, oceanography simulation and modeling
@misc{Eatmon2015,
  title     = {Generating Cinema Databases for In Situ Visualization of Ocean Modeling Simulations},
  author    = {Arnold Eatmon},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/12/Eatmon2015.pdf},
  abstract  = {Science is changing. In the last few years science has seen a dramatic shift towards data intensive discovery, a combination of past paradigms of discovery integrated with computational power and an ample supply of data from which we can derive information.
One notable problem with this is that while data and computational power are increasing, storage is decreasing. Storage in this day and age is a resource, and resources are inherently limited. Due to being a resource decisions must be made on how to wisely utilize storage to tackle scientific challenges.
Cinema databases allow for in situ processing and visualization, eliminating the need to write large amounts of data to disk. Cinema databases allow for scientists to not only view the data but also to interact with the data in meaningful ways. Simultaneously, cinema databases drastically reduce the amount of storage utilized in simulation.
In this project, I applied cinema database technology to a climate simulation model, MPAS-Ocean.},
  note      = {LA-UR-15-27748},
  keywords  = {cinema, oceanography simulation and modeling},
  pubstate  = {published},
  tppubtype = {presentation},
}
One notable problem with this is that while data and computational power are increasing, storage is decreasing. Storage in this day and age is a resource, and resources are inherently limited. Due to being a resource decisions must be made on how to wisely utilize storage to tackle scientific challenges.
Cinema databases allow for in situ processing and visualization, eliminating the need to write large amounts of data to disk. Cinema databases allow for scientists to not only view the data but also to interact with the data in meaningful ways. Simultaneously, cinema databases drastically reduce the amount of storage utilized in simulation.
In this project, I applied cinema database technology to a climate simulation model, MPAS-Ocean.
Shaikh, Uzma
Summer of Storyboards Presentation
05.10.2015, (LA-UR-15-27737).
Abstract | Links | BibTeX | Tags: cinema, storyboards
@misc{Shaikh2015,
  title     = {Summer of Storyboards},
  author    = {Uzma Shaikh},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/07/Summer_of_Storyboards.pdf},
  abstract  = {This presentation summarizes a summer project to create a Cinema web interface which provides both a generalized or holistic view of the system in a global view and a specified view of the system in a web interface.},
  note      = {LA-UR-15-27737},
  keywords  = {cinema, storyboards},
  pubstate  = {published},
  tppubtype = {presentation},
}
Bryan, Christopher
Coyote Universe Emulator for Web Presentation
05.10.2015, (LA-UR-15-27732).
Abstract | Links | BibTeX | Tags: matter power spectrum, prediction
@misc{Bryan2015,
title = {Coyote Universe Emulator for Web},
author = {Christopher Bryan},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Coyote_Universe_Emulator_for_Web.pdf},
year = {2015},
date = {2015-10-05},
abstract = {This presentation summarizes a summer project to create a prediction tool or 'Cosmic Emu(lator)' for the matter power spectrum P(k).},
note = {LA-UR-15-27732},
keywords = {matter power spectrum, prediction},
pubstate = {published},
tppubtype = {presentation}
}
Usher, William
Summer 2015 LANL Exit Talk Presentation
05.10.2015, (LA-UR-15-27730).
Abstract | Links | BibTeX | Tags: OpenMP, VTK-m
@misc{Usher2010,
title = {Summer 2015 LANL Exit Talk},
author = {William Usher},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Summer_2015_LANL_Exit_Talk.pdf},
year = {2015},
date = {2015-10-05},
abstract = {This presentation summarizes summer work on writing the OpenMP backend and making general performance improvements and comparisons in VTK-m. In the area of performance measurements and improvements a benchmarking suite to VTK-m to compare backends and changes to backends was added. Additionally, the default storage type was migrated to use an aligned allocator to improve CPU and MIC performance. In the area of OpenMP backend Jeff Inman's hand-vectorized MIC scan was ported to a generic version in VTK-m, achieving somewhat comparable performance, and he worked on implementing a parallel quick sort for the backend as well, but there is still some work left to do.},
note = {LA-UR-15-27730},
keywords = {OpenMP, VTK-m},
pubstate = {published},
tppubtype = {presentation}
}
Pulido, Jesus
Survey and Analysis of Multiresolution Methods for Turbulence Data Presentation
05.10.2015, (LA-UR-15-27727).
Abstract | Links | BibTeX | Tags: Multi-resolution, turbulence
@misc{Pulido2015,
  title     = {Survey and Analysis of Multiresolution Methods for Turbulence Data},
  author    = {Jesus Pulido},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Survey_and_Analysis_of_Multiresolution_Methods_for_Turbulence_Data.pdf},
  abstract  = {The first section of this presentation summarizes summer work that analyzed and compared the effectiveness of Haar, Biorthogonal B-spline, and Daubechies wavelets, Coiflets, Surfacelets and Curvelets with respect to direct numerical simulation (DNS) data, visualized and evaluated numerical accuracy of original data and derived quantities using limited amounts of coefficients on reconstruction, and identified strengths and weaknesses of each technique and gave recommendations on the usage of these multi-resolution methods.},
  note      = {LA-UR-15-27727},
  keywords  = {Multi-resolution, turbulence},
  pubstate  = {published},
  tppubtype = {presentation},
}
Pulido, Jesus
Enabling Remote Visualization and Scale Analysis of Large Turbulence Databases Presentation
05.10.2015, (LA-UR-15-27727).
Abstract | Links | BibTeX | Tags: remote visualization, turbulence
@misc{Pulido2015b,
  title     = {Enabling Remote Visualization and Scale Analysis of Large Turbulence Databases},
  author    = {Jesus Pulido},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Enabling_Remote_Visualization_and_Scale_Analysis_of_Large_Turbulence_Databases.pdf},
  abstract  = {The second section of this presentation summarizes summer work that added remote visualization support and additional compute capabilities to the Johns Hopkins Turbulence Database (JHTDB), introduced wavelet compression at the data-level to reduce access cost, bandwidth, and improve visualization latency, used wavelet compression to reduce memory footprint of datasets for visualization.},
  note      = {LA-UR-15-27727},
  keywords  = {remote visualization, turbulence},
  pubstate  = {published},
  tppubtype = {presentation},
}
Dutta, Soumya
Summer 2015 Final Presentation Presentation
05.10.2015, (LA-UR-15-27726).
Abstract | Links | BibTeX | Tags: algorithm comparison framework, in situ, OpenMC
@misc{Dutta2015,
  title     = {Summer 2015 Final Presentation},
  author    = {Soumya Dutta},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Summer_2015_Final_Presentation.pdf},
  abstract  = {This presentation includes a discussion of several data representations and a global algorithm comparison framework and of in-situ early convergence detection on a Monte Carlo based simulation called OpenMC.},
  note      = {LA-UR-15-27726},
  keywords  = {algorithm comparison framework, in situ, OpenMC},
  pubstate  = {published},
  tppubtype = {presentation},
}
Barnes, David C.
Image Clustering of Scientific Databases Presentation
05.10.2015, (LA-UR-15-27725).
Abstract | Links | BibTeX | Tags: cinema, clustering
@misc{Barnes2015,
  title     = {Image Clustering of Scientific Databases},
  author    = {David C. Barnes},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Image_Clustering_of_Scientific_Databases.pdf
               http://datascience.dsscale.org/wp-content/uploads/2016/08/Data_Science_Cinema_Poster_Full.png},
  abstract  = {This presentation summarizes summer work to provide image clustering of scientific databases. },
  note      = {LA-UR-15-27725},
  keywords  = {cinema, clustering},
  pubstate  = {published},
  tppubtype = {presentation},
}
Lu, Kewei
Portable Data Parallel Visualization Algorithms with VTK-m Presentation
05.10.2015, (LA-UR-15-27724).
Abstract | Links | BibTeX | Tags: data parallel, VTK-m
@misc{Lu2015,
title = {Portable Data Parallel Visualization Algorithms with VTK-m},
author = {Kewei Lu},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Portable_Data_Parallel_Visualization_Algorithms_with_VTK-m.pdf},
year = {2015},
date = {2015-10-05},
abstract = {This presentation summarizes summer work to develop portable data parallel visualization algorithms in VTK-m. This included writing visualization filters for streamlines and stream surfaces as well as modifying the original isosurface implementation using the new data model and worklets.},
note = {LA-UR-15-27724},
keywords = {data parallel, VTK-m},
pubstate = {published},
tppubtype = {presentation}
}
Biswas, Ayan
Summer of 2015: An Exit Talk Presentation
05.10.2015, (LA-UR-15-27738).
Abstract | Links | BibTeX | Tags: MPAS-Ocean, streamlines
@misc{Biswas2015,
  title     = {Summer of 2015: An Exit Talk},
  author    = {Ayan Biswas},
  year      = {2015},
  date      = {2015-10-05},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Summer_of_2015_An_Exit_Talk.pdf},
  abstract  = {This presentation summarizes the summer development of a new parallel algorithm for visualization of streamlines in MPAS-Ocean.},
  note      = {LA-UR-15-27738},
  keywords  = {MPAS-Ocean, streamlines},
  pubstate  = {published},
  tppubtype = {presentation},
}
Ahrens, James
Supercharging the Scientific Process Via Data Science at Scale Presentation
30.06.2015, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: data science at scale, scientific method
@misc{Ahrens2016,
title = {Supercharging the Scientific Process Via Data Science at Scale},
author = {James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/07/Supercharging_the_Scientific_Process_Via_Data_Science_at-Scale_IRTG2057.pptx
http://www.irtg2057.de/index.html},
year = {2015},
date = {2015-06-30},
abstract = {Historically, the scientific process is used to explain a phenomenon by iteratively formulating a theory and running real-world experiments to test and improve the theory. Advances in the field of computer engineering, driven by Moore's law (which states the number of transistors per square inch on integrated circuits doubles every year) have fundamentally changed the scientific process in two ways. The first change is the availability of inexpensive but highly accurate sensors composed of integrated circuits. The sensors, such as extremely high resolution cameras and signal recorders, enable the collection of scientific data and are used in all scientific disciplines including astronomy, physics, biology and more recently the social sciences. The second change is the addition of highly detailed scientific simulations that run on high performance computing (HPC) platforms. The performance of these HPC platforms has increased by approximately six orders of magnitude over the past two decades from terascale (10^12 Floating Points Operations Per Second (FLOPS)) to petascale (10^15 FLOPS). This performance increase has enabled the creation of extremely detailed scientific simulations. These simulations augment the scientific process by providing a proving ground for theories and an environment for virtual experimentation. Both changes produce massive data streams that need to be effectively processed, transformed, analyzed and understood through data science. In this talk, I will present new developments in data science, highlighting how the scientific simulation process needs to change for exascale supercomputers (10^18 FLOPS). Exascale supercomputers are bounded by power and storage constraints. These constraints require us to transition from standard, storage-based, post-processing, data science approaches to intelligent, automated, streaming, in situ ones.
I will present a new approach that focuses on automatically identifying and tracking areas of interest, and then on selecting and presenting these areas to scientists. The work will be presented in the context of solving real-world data science problems for the climate and cosmological science communities.},
note = {LA-UR-pending},
keywords = {data science at scale, scientific method},
pubstate = {published},
tppubtype = {presentation}
}
Ahrens, James
Supercharging the Scientific Process Via Data Science at Scale Presentation
30.06.2015, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: data science at scale, scientific method
@misc{Ahrens2015,
title = {Supercharging the Scientific Process Via Data Science at Scale},
author = {James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Supercharging_the_Scientific_Process_Via_Data_Science_at-Scale_Groningen.pptx
http://www.rug.nl/research/fmns/themes/dssc/symposium/},
year = {2015},
date = {2015-06-30},
abstract = {Historically, the scientific process is used to explain a phenomenon by iteratively formulating a theory and running real-world experiments to test and improve the theory. Advances in the field of computer engineering, driven by Moore's law (which states the number of transistors per square inch on integrated circuits doubles every year) have fundamentally changed the scientific process in two ways. The first change is the availability of inexpensive but highly accurate sensors composed of integrated circuits. The sensors, such as extremely high resolution cameras and signal recorders, enable the collection of scientific data and are used in all scientific disciplines including astronomy, physics, biology and more recently the social sciences. The second change is the addition of highly detailed scientific simulations that run on high performance computing (HPC) platforms. The performance of these HPC platforms has increased by approximately six orders of magnitude over the past two decades from terascale (10^12 Floating Points Operations Per Second (FLOPS)) to petascale (10^15 FLOPS). This performance increase has enabled the creation of extremely detailed scientific simulations. These simulations augment the scientific process by providing a proving ground for theories and an environment for virtual experimentation. Both changes produce massive data streams that need to be effectively processed, transformed, analyzed and understood through data science. In this talk, I will present new developments in data science, highlighting how the scientific simulation process needs to change for exascale supercomputers (10^18 FLOPS). Exascale supercomputers are bounded by power and storage constraints. These constraints require us to transition from standard, storage-based, post-processing, data science approaches to intelligent, automated, streaming, in situ ones.
I will present a new approach that focuses on automatically identifying and tracking areas of interest, and then on selecting and presenting these areas to scientists. The work will be presented in the context of solving real-world data science problems for the climate and cosmological science communities.},
note = {LA-UR-pending},
keywords = {data science at scale, scientific method},
pubstate = {published},
tppubtype = {presentation}
}
Rogers, David; Ahrens, James; Patchett, John; DeMarle, David
Exploring Cinema with the Cinema Virtual Machine Proceedings Article
In: 2015, (Documentation/instructions. LA-UR-15-21934).
Abstract | Links | BibTeX | Tags: cinema, in-situ data analysis
@inproceedings{rogers2015exploring,
title = {Exploring Cinema with the Cinema Virtual Machine},
author = {David Rogers and James Ahrens and John Patchett and David DeMarle},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/08/LA-UR-15-21934.pdf},
year = {2015},
date = {2015-05-27},
abstract = {Extreme scale scientific simulations are pushing the limits of scientific computation, and are stressing the limits of the data that we can store, explore, and understand. Options for extreme scale data analysis are often presented as a stark contrast: save massive data files to disk for interactive, exploratory visualization, or perform in situ analysis to save detailed data about phenomena a scientist knows about in advance. We propose that there is an alternative approach—a highly interactive, image-based approach that promotes exploration of simulation results, and is easily accessed through extensions to widely used open source tools. This new approach supports interactive exploration of a wide
range of results, while still significantly reducing data movement and storage.},
note = {Documentation/instructions. LA-UR-15-21934},
keywords = {cinema, in-situ data analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
range of results, while still significantly reducing data movement and storage.
Adhinarayanan, Vignesh; Feng, Wu-chun; Woodring, Jonathan; Rogers, David; Ahrens, James
On the Greenness of In-Situ and Post-Processing Visualization Pipelines Proceedings Article
In: 11th workshop on High-Performance, Power-Aware Computing (HPPAC), Hyderabad, India, 2015, (LA-UR-15-21414).
Abstract | Links | BibTeX | Tags: greenness, in-situ, pipelines, post-processing, visualization
@inproceedings{vignesh-in-situ-hppac15,
title = {On the Greenness of In-Situ and Post-Processing Visualization Pipelines},
author = {Vignesh Adhinarayanan and Wu-chun Feng and Jonathan Woodring and David Rogers and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/OnTheGreenessOfIn-SituAndPost-ProcessingVisualizationPipelines.pdf},
year = {2015},
date = {2015-05-01},
booktitle = {11th workshop on High-Performance, Power-Aware Computing (HPPAC)},
address = {Hyderabad, India},
abstract = {Post-processing visualization pipelines are tradi- tionally used to gain insight from simulation data. However, changes to the system architecture for high-performance com- puting (HPC), dictated by the exascale goal, have limited the applicability of post-processing visualization. As an alternative, in-situ pipelines are proposed in order to enhance the knowl- edge discovery process via “real-time” visualization. Quantitative studies have already shown how in-situ visualization can improve performance and reduce storage needs at the cost of scientific exploration capabilities. However, to fully understand the trade- off space, a head-to-head comparison of power and energy (between the two types of visualization pipelines) is necessary. Thus, in this work, we study the greenness (i.e., power, energy, and energy efficiency) of the in-situ and the post-processing visualization pipelines, using a proxy heat-transfer simulation as an example. For a realistic I/O load, the in-situ pipeline consumes 43% less energy than the post-processing pipeline. Contrary to expectations, our findings also show that only 9% of the total energy is saved by reducing off-chip data movement, while the rest of the savings comes from reducing the system idle time. This suggests an alternative set of optimization techniques for reducing the power consumption of the traditional post- processing pipeline.},
note = {LA-UR-15-21414},
keywords = {greenness, in-situ, pipelines, post-processing, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Widanagamaachchi, Wathsala; Hammond, Karl D.; Lo, Li-Ta; Wirth, Brian D.; Samsel, Francesca; Sewell, Christopher; Ahrens, James; Pascucci, Valerio
Visualization of Large-Scale Atomistic Simulations of Plasma-Surface Interactions Proceedings Article
In: Proceedings of EuroVis (short paper), Cagliari, Italy, 2015, (LA-UR-15-21194).
Abstract | Links | BibTeX | Tags: atomistic simulation, large-scale, Model Validation and Analysis, Physical Sciences and Engineering, plasma-surface interactions, visualization
@inproceedings{Widanagamaachchi:2015a,
title = {Visualization of Large-Scale Atomistic Simulations of Plasma-Surface Interactions},
author = {Wathsala Widanagamaachchi and Karl D. Hammond and Li-Ta Lo and Brian D. Wirth and Francesca Samsel and Christopher Sewell and James Ahrens and Valerio Pascucci},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisualizationAndAnalysisOfLarge-ScaleAtomisticSimulationsOfPlasma-SurfaceInteractions.pdf},
year = {2015},
date = {2015-05-01},
booktitle = {Proceedings of EuroVis (short paper)},
address = {Cagliari, Italy},
abstract = {We present a simulation–visualization pipeline that uses the LAMMPS Molecular Dynamics Simulator and the Visualization Toolkit to create a visualization and analysis environment for atomistic simulations of plasma–surface interactions. These simulations are used to understand the origin of fuzz-like, microscopic damage to tungsten and other metal surfaces by helium. The proposed pipeline serves both as an aid to visualization, i.e. drawing the surfaces of gas bubbles and voids/cavities in the metal, as well as a means of analysis, i.e. extracting various statistics and gas bubble evolution details. The result is a better understanding of the void and bubble formation process that is difficult if not impossible to get using conventional atomistic visualization software.},
note = {LA-UR-15-21194},
keywords = {atomistic simulation, large-scale, Model Validation and Analysis, Physical Sciences and Engineering, plasma-surface interactions, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Ahrens, James
Shared Analysis for Resilience, Debugging, Verification, Validation and Discovery Presentation
30.04.2015, (LA-UR-15-23284).
Abstract | Links | BibTeX | Tags: shared analysis
@misc{Ahrens2015b,
title = {Shared Analysis for Resilience, Debugging, Verification, Validation and Discovery},
author = {James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/SharedAnalysisFoResilienceDebuggingVerificationValidationAndDiscovery.pdf},
year = {2015},
date = {2015-04-30},
abstract = {This talk given by James Ahrens at the Salesian 2015 meeting describes, from an application perspective, the interconnectedness of simulation analysis, debugging and resilience. It advocates for a joint approach to addressing these challenges based on a framework that supports information gathering, event detection and triggered actions.},
note = {LA-UR-15-23284},
keywords = {shared analysis},
pubstate = {published},
tppubtype = {presentation}
}
Ahrens, James
Implications of Numerical and Data Intensive Technology Trends on Scientific Visualization and Analysis Presentation
18.03.2015, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: scientific visualization
@misc{Ahrens2015implications,
title = {Implications of Numerical and Data Intensive Technology Trends on Scientific Visualization and Analysis},
author = {James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Implications_of_Numerical_and_Data_Intensive_Technology_Trends_on_Scientific_Visualization_and_Analysis.pdf
https://www.pathlms.com/siam/courses/1043/sections/1261/thumbnail_video_presentations/9886
http://www.siam.org/meetings/cse15/cse15_program.pdf},
year = {2015},
date = {2015-03-18},
abstract = {Technology trends in numerically and data intensive computing have the potential to reshape and significantly advance how we visualize and analyze the results of scientific simulations. However, next generation numerically intensive supercomputers are bound by power and storage constraints. These require us to transition from standard post-processing visualization and analysis approaches to intelligent, automated in-situ ones. In addition, data intensive technology trends that support accessing and understanding our data using intuitive, web-based and query-driven interfaces are now the norm. In this talk, I will discuss these trends and several freely available, open-source approaches that leverage them.},
note = {LA-UR-pending},
keywords = {scientific visualization},
pubstate = {published},
tppubtype = {presentation}
}
Ahrens, James
Accelerating Time to Insight in the Exascale Ecosystem Through the Optimization of Scientific Workflows Presentation
30.01.2015, (LA-UR-15-20354).
Abstract | Links | BibTeX | Tags: scientific workflows
@misc{Ahrens2015accelerating,
title = {Accelerating Time to Insight in the Exascale Ecosystem Through the Optimization of Scientific Workflows},
author = {James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/AcceleratingExtremeScaleWorkflowsAhrensBDEC2015.pdf
http://www.exascale.org/bdec/},
year = {2015},
date = {2015-01-30},
abstract = {The purpose of this white paper is to highlight software and hardware trends that will impact workflows at exascale and to describe a pathforward that harness these changes for workflow acceleration. By applying trends of automation, functional abstraction, the availability of compute power 'everywhere' and accurate cost modeling to our scientific workflows, scientific productivity can be greatly improved. Our goal is to identify, automate and accelerate the manual operations that pervade our current workflows.},
note = {LA-UR-15-20354},
keywords = {scientific workflows},
pubstate = {published},
tppubtype = {presentation}
}
Samsel, Francesca; Petersen, Mark; Geld, Terece; Abram, Greg; Wendelberger, Joanne; Ahrens, James
Colormaps That Improve Perception of High-Resolution Ocean Data Proceedings Article
In: Proceedings of the 33rd Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems, pp. 703–710, ACM, Seoul, Republic of Korea, 2015, ISBN: 978-1-4503-3146-3, (LA-UR-15-20105).
Abstract | Links | BibTeX | Tags: climate science, color perception, color theory, colormaps, high-resolution datasets, scientific visualization
@inproceedings{Samsel:2015:CIP:2702613.2702975,
title = {Colormaps That Improve Perception of High-Resolution Ocean Data},
author = {Francesca Samsel and Mark Petersen and Terece Geld and Greg Abram and Joanne Wendelberger and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ColormapsThatImprovePerceptionOfHigh-ResolutionOceanData.pdf},
doi = {10.1145/2702613.2702975},
isbn = {978-1-4503-3146-3},
year = {2015},
date = {2015-01-01},
booktitle = {Proceedings of the 33rd Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems},
pages = {703--710},
publisher = {ACM},
address = {Seoul, Republic of Korea},
series = {CHI EA '15},
abstract = {Scientists from the Climate, Ocean and Sea Ice Modeling Team (COSIM) at the Los Alamos National Laboratory (LANL) are interested in gaining a deeper understanding of three primary ocean currents: the Gulf Stream, the Kuroshio Current, and the Agulhas Current & Retroflection. To address these needs, visual artist Francesca Samsel teamed up with experts from the areas of computer science, climate science, statistics, and perceptual science. By engaging an artist specializing in color, we created colormaps that provide the ability to see greater detail in these high-resolution datasets. The new colormaps applied to the POP dataset enabled scientists to see areas of interest unclear using standard colormaps. Improvements in the perceptual range of color allowed scientists to highlight structures within specific ocean currents. Work with the COSIM team members drove development of nested colormaps which provide further detail to the scientists.},
note = {LA-UR-15-20105},
keywords = {climate science, color perception, color theory, colormaps, high-resolution datasets, scientific visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Carr, Hamish; Sewell, Christopher; Lo, Li-Ta; Ahrens, James
Hybrid Data-Parallel Contour Tree Computation Proceedings Article
In: 2015, (LA-UR-15-24759).
Abstract | Links | BibTeX | Tags: and object reppresentations, computational geometry and object modeling, contour tree, data-parallel, gpu, multi-core, nvidia thrust, simulation output analysis, solid, surface, topological analysis
@inproceedings{Carr2015,
title = {Hybrid Data-Parallel Contour Tree Computation},
author = {Hamish Carr and Christopher Sewell and Li-Ta Lo and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/HybridData-ParallelContourTreeComputaion.pdf},
year = {2015},
date = {2015-01-01},
number = {LA-UR-15-24759},
institution = {Los Alamos National Laboratory},
abstract = {As data sets increase in size beyond the petabyte, it is increasingly important to have automated methods for data analysis and visualization. While topological analysis tools such as the contour tree and Morse-Smale complex are now well established, there is still a shortage of efficient parallel algorithms for their computation, in particular for massively data-parallel computation on a SIMD model. We report the first data-parallel algorithm for computing the fully augmented contour tree, using a quantized computation model. We then extend this to provide a hybrid data-parallel / distributed algorithm allowing scaling beyond a single GPU or CPU, and provide results for its computation. Our implementation uses the portable data-parallel primitives provided by Nvidia’s Thrust library, allowing us to compile our same code for both GPUs and multi-core CPUs.},
note = {LA-UR-15-24759},
keywords = {and object reppresentations, computational geometry and object modeling, contour tree, data-parallel, gpu, multi-core, nvidia thrust, simulation output analysis, solid, surface, topological analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
Woodring, Jonathan; Petersen, Mark; Schmeiber, Andre; Patchett, John; Ahrens, James; Hagen, Hans
In Situ Eddy Analysis in a High-Resolution Ocean Climate Model Proceedings Article
In: IEEE, 2015, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: climate modeling, collaborative development, feature analysis, feature extraction, high performance computing, In situ analysis, mesoscale eddies, ocean modeling, online analysis, revision control, simulation, software engineering, supercomputing
@inproceedings{Woodring2015,
title = {In Situ Eddy Analysis in a High-Resolution Ocean Climate Model},
author = {Jonathan Woodring and Mark Petersen and Andre Schmeiber and John Patchett and James Ahrens and Hans Hagen},
url = {http://ieeexplore.ieee.org/document/7192723/},
year = {2015},
date = {2015-01-01},
publisher = {IEEE},
abstract = {An eddy is a feature associated with a rotating body of fluid, surrounded by a ring of shearing fluid. In the ocean, eddies are 10 to 150 km in diameter, are spawned by boundary currents and baroclinic instabilities, may live for hundreds of days, and travel for hundreds of kilometers. Eddies are important in climate studies because they transport heat, salt, and nutrients through the world’s oceans and are vessels of biological productivity. The study of eddies in global ocean-climate models requires large-scale, high-resolution simulations. This poses a problem for feasible (timely) eddy analysis, as ocean simulations generate massive amounts of data, causing a bottleneck for traditional analysis workflows. To enable eddy studies, we have developed an in situ workflow for the quantitative and qualitative analysis of MPAS-Ocean, a high-resolution ocean climate model, in collaboration with the ocean model research and development process. Planned eddy analysis at high spatial and temporal resolutions will not be possible with a post- processing workflow due to various constraints, such as storage size and I/O time, but the in situ workflow enables it and scales well to ten-thousand processing elements.},
note = {LA-UR-pending},
keywords = {climate modeling, collaborative development, feature analysis, feature extraction, high performance computing, In situ analysis, mesoscale eddies, ocean modeling, online analysis, revision control, simulation, software engineering, supercomputing},
pubstate = {published},
tppubtype = {inproceedings}
}
Bryan, Christopher; Wu, Xue; Mniszewski, Susan; Ma, Kwan-Liu
Integrating predictive analytics into a spatiotemporal epidemic simulation Proceedings Article
In: 2015 IEEE Conference on Visual Analytics Science and Technology, VAST 2015, Chicago, IL, USA, October 25-30, 2015, pp. 17–24, 2015, (LA-UR-15-24873).
Abstract | Links | BibTeX | Tags: Epidemic Visualization, Predictive Modeling, Spatial-Temporal Systems, Visual analytics
@inproceedings{DBLP:conf/ieeevast/BryanWMM15,
title = {Integrating predictive analytics into a spatiotemporal epidemic simulation},
author = {Christopher Bryan and Xue Wu and Susan Mniszewski and Kwan-Liu Ma},
url = {http://dx.doi.org/10.1109/VAST.2015.7347626},
doi = {10.1109/VAST.2015.7347626},
year = {2015},
date = {2015-01-01},
booktitle = {2015 IEEE Conference on Visual Analytics Science and Technology, VAST 2015, Chicago, IL, USA, October 25-30, 2015},
pages = {17--24},
crossref = {DBLP:conf/ieeevast/2015},
abstract = {The Epidemic Simulation System (EpiSimS) is a scalable, complex modeling tool for analyzing disease within the United States. Due to its high input dimensionality, time requirements, and resource constraints, simulating over the entire parameter space is unfeasible. One solution is to take a granular sampling of the input space and use simpler predictive models (emulators) in between. The quality of the implemented emulator depends on many factors: its robustness, sophistication, configuration settings, and suitability to the input data. Visual analytics (VA) can be leveraged to provide guidance and understanding to the user. In this paper, we have implemented a novel VA interface and workflow for emulator building and use. We introduce a workflow to build emulators, make predictions, and then analyze the results. Our prediction process first predicts temporal time series, and uses these to derive predicted spatial densities. Integrated into the EpiSimS framework, we target users who are non-experts at statistical modeling. This approach allows for a high level of analysis into the state of the built emulators and their resultant predictions. We present our workflow, models and the associated VA system, and evaluate the overall utility with feedback from EpiSimS scientists.},
note = {LA-UR-15-24873},
keywords = {Epidemic Visualization, Predictive Modeling, Spatial-Temporal Systems, Visual analytics},
pubstate = {published},
tppubtype = {inproceedings}
}
Sewell, Christopher; Heitmann, Katrin; Finkel, Hal; Zagaris, George; Parete-Koon, Suzanne; Fasel, Patricia; Pope, Adrian; Frontiere, Nicholas; Lo, Li-Ta; Messer, Bronson; Habib, Salman; Ahrens, James
Large-Scale Compute-Intensive Analysis via a Combined In-situ and Co-scheduling Workflow Approach Proceedings Article
In: Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis, IEEE Press, Austin, Texas, 2015, (LA-UR-15-22830).
Abstract | Links | BibTeX | Tags: analysis, co-scheduling, compute-intensive, in-situ, large-scale, workflow
@inproceedings{Sewell:2015b,
title = {Large-Scale Compute-Intensive Analysis via a Combined In-situ and Co-scheduling Workflow Approach},
author = {Christopher Sewell and Katrin Heitmann and Hal Finkel and George Zagaris and Suzanne Parete-Koon and Patricia Fasel and Adrian Pope and Nicholas Frontiere and Li-Ta Lo and Bronson Messer and Salman Habib and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Large-ScaleCompute-IntensiveAnalysisViaACombinedIn-situAndCo-schedulingWorkflowApproach.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis},
publisher = {IEEE Press},
address = {Austin, Texas},
series = {SC '15},
abstract = {Large-scale simulations can produce hundreds of terabytes to peta- bytes of data, complicating and limiting the efficiency of work- flows. Traditionally, outputs are stored on the file system and an- alyzed in post-processing. With the rapidly increasing size and complexity of simulations, this approach faces an uncertain future. Trending techniques consist of performing the analysis in-situ, uti- lizing the same resources as the simulation, and/or off-loading sub- sets of the data to a compute-intensive analysis system. We intro- duce an analysis framework developed for HACC, a cosmological N-body code, that uses both in-situ and co-scheduling approaches for handling petabyte-scale outputs. We compare different anal- ysis set-ups ranging from purely off-line, to purely in-situ to in- situ/co-scheduling. The analysis routines are implemented using the PISTON/VTK-m framework, allowing a single implementation of an algorithm that simultaneously targets a variety of GPU, multi- core, and many-core architectures.},
note = {LA-UR-15-22830},
keywords = {analysis, co-scheduling, compute-intensive, in-situ, large-scale, workflow},
pubstate = {published},
tppubtype = {inproceedings}
}
Heitmann, Katrin; Frontiere, Nicholas; Sewell, Christopher; Habib, Salman; Pope, Adrian; Finkel, Hal; Rizzi, Silvio; Insley, Joe; Bhattacharya, Suman
The Q Continuum Simulation: Harnessing the Power of GPU Accelerated Supercomputers Journal Article
In: 2015, (LA-UR-15-28271).
Abstract | Links | BibTeX | Tags: cosmology, gpu, n-body
@article{Heitmann:2015a,
title = {The Q Continuum Simulation: Harnessing the Power of GPU Accelerated Supercomputers},
author = {Katrin Heitmann and Nicholas Frontiere and Christopher Sewell and Salman Habib and Adrian Pope and Hal Finkel and Silvio Rizzi and Joe Insley and Suman Bhattacharya},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/11/TheQContinuumSimulationHarnessingThePowerOfGPUAcceleratedSupercomputers2.pdf},
year = {2015},
date = {2015-01-01},
publisher = {To appear in The Astrophysical Journal},
abstract = {Modeling large-scale sky survey observations is a key driver for the continuing development of high resolution, large-volume, cosmological simulations. We report the first results from the 'Q Continuum' cosmological N-body simulation run carried out on the GPU-accelerated supercomputer Titan. The simulation encompasses a volume of (1300 Mpc)^3 and evolves more than half a trillion particles, leading to a particle mass resolution of ~1.5 X 10^8 M_sun. At this mass resolution, the Q Continuum run is currently the largest cosmology simulation available. It enables the construction of detailed synthetic sky catalogs, encompassing different modeling methodologies, including semi-analytic modeling and sub-halo abundance matching in a large, cosmological volume. Here we describe the simulation and outputs in detail and present first results for a range of cosmological statistics, such as mass power spectra, halo mass functions, and halo mass-concentration relations for different epochs. We also provide details on challenges connected to running a simulation on almost 90% of Titan, one of the fastest supercomputers in the world, including our usage of Titan's GPU accelerators.},
note = {LA-UR-15-28271},
keywords = {cosmology, gpu, n-body},
pubstate = {published},
tppubtype = {article}
}
Sewell, Christopher; Lo, Li-Ta; Heitmann, Katrin; Habib, Salman; Ahrens, James
Utilizing Many-Core Accelerators for Halo and Center Finding within a Cosmology Simulation Proceedings Article
In: Proceedings of the IEEE Symposium on Large Data Analysis and Visualization, IEEE Press, Chicago, Illinois, 2015, (LA-UR-15-22202).
Abstract | Links | BibTeX | Tags: cosmology, halo finding, many-core, Programming Techniques
@inproceedings{Sewell:2015a,
title = {Utilizing Many-Core Accelerators for Halo and Center Finding within a Cosmology Simulation},
author = {Christopher Sewell and Li-Ta Lo and Katrin Heitmann and Salman Habib and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/UtilizingMany-CoreAcceleratorsForHaloAndCenterFindingWithinACosmologySimulation.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {Proceedings of the IEEE Symposium on Large Data Analysis and Visualization},
publisher = {IEEE Press},
address = {Chicago, Illinois},
series = {LDAV '15},
abstract = {Efficiently finding and computing statistics about “halos” (regions of high density) are essential analysis steps for N-body cosmology simulations. However, in state-of-the-art simulation codes, these analysis operators do not currently take advantage of the shared- memory data-parallelism available on multi-core and many-core ar- chitectures. The Hybrid / Hardware Accelerated Cosmology Code (HACC) is designed as an MPI+X code, but the analysis operators are parallelized only among MPI ranks, because of the difficulty in porting different X implementations (e.g., OpenMP, CUDA) across all architectures on which it is run. In this paper, we present portable data-parallel algorithms for several variations of halo find- ing and halo center finding algorithms. These are implemented with the PISTON component of the VTK-m framework, which uses Nvidia’s Thrust library to construct data-parallel algorithms that al- low a single implementation to be compiled to multiple backends to target a variety of multi-core and many-core architectures. Fi- nally, we compare the performance of our halo and center find- ing algorithms against the original HACC implementations on the Moonlight, Stampede, and Titan supercomputers. The portability of Thrust allowed the same code to run efficiently on each of these architectures. On Titan, the performance improvements using our code have enabled halo analysis to be performed on a very large data set (81923 particles across 16,384 nodes of Titan) for which analysis using only the existing CPU algorithms was not feasible.},
note = {LA-UR-15-22202},
keywords = {cosmology, halo finding, many-core, Programming Techniques},
pubstate = {published},
tppubtype = {inproceedings}
}
Samsel, Francesca; Petersen, Mark; Abram, Greg; Turton, Terece; Rogers, David; Ahrens, James
Visualization of ocean currents and eddies in a high-resolution global ocean-climate model Proceedings Article
In: Proceedings of the International Conference on High Performance Computing, Networking, Storage and Analysis 2015, 2015, (LA-UR-15-20105).
Abstract | Links | BibTeX | Tags: oceanography simulation and modeling, visualization
@inproceedings{samsel2015visualization,
title = {Visualization of ocean currents and eddies in a high-resolution global ocean-climate model},
author = {Francesca Samsel and Mark Petersen and Greg Abram and Terece Turton and David Rogers and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/08/VisualizationofOceanCurrentsandEddiesinaHigh-resoutionOceanModel.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {Proceedings of the International Conference on High Performance Computing, Networking, Storage and Analysis 2015},
abstract = {Climate change research relies on models to better understand and predict the complex, interdependent processes that affect the atmosphere, ocean, and land. These models are computationally intensive and produce terabytes to petabytes of data. Visualization and analysis is increasingly difficult, yet is critical to gain scientific insights from large simulations. The recently-developed Model for Prediction Across Scales-Ocean (MPAS-Ocean) is designed to investigate climate change at global high-resolution (5 to 10 km grid cells) on high performance computing platforms. In the accompanying video, we use state-of-the-art visualization techniques to explore the physical processes in the ocean relevant to climate change. These include heat transport, turbulence and eddies, weakening of the meridional overturning circulation, and interaction between a warming ocean and Antarctic ice shelves. The project exemplifies the benefits of tight collaboration among scientists, artists, computer scientists, and visualization specialists.},
note = {LA-UR-15-20105},
keywords = {oceanography simulation and modeling, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Adhinarayanan, Vignesh; Pakin, Scott; Rogers, David; Feng, Wu-chun; Ahrens, James
Performance, Power, and Energy of In-Situ and Post-Processing Visualization: A Case Study in Climate Simulation Proceedings Article
In: 2015, (Best Research Poster Finalist, LA-UR-15-26284).
Links | BibTeX | Tags: in-situ visualization
@inproceedings{Adhinarayanan2015climate,
title = {Performance, Power, and Energy of In-Situ and Post-Processing Visualization: A Case Study in Climate Simulation},
author = {Vignesh Adhinarayanan and Scott Pakin and David Rogers and Wu-chun Feng and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/08/PerformancePowerandEnergyofIn-SituandPost-ProcessingVisualization-ACaseStudyinClimateSimulation.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {2015 ACM/IEEE International Conference for High Performance Computing, Networking, Storage, and Analysis (SC)},
note = {Best Research Poster Finalist, LA-UR-15-26284},
keywords = {in-situ visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Bujack, Roxana; Joy, Kenneth
Lagrangian Representations of Flow Fields with Parameter Curves Proceedings Article
In: Large Data Analysis and Visualization (LDAV), 2015 IEEE 4th Symposium on, IEEE 2015.
@inproceedings{bujack2015lagrangian,
title = {Lagrangian Representations of Flow Fields with Parameter Curves},
author = {Roxana Bujack and Kenneth Joy},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/LagrangianRepresentationsofFlowFieldswithParameterCurves.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {Large Data Analysis and Visualization (LDAV), 2015 IEEE 4th Symposium on},
organization = {IEEE},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Bujack, Roxana; Scheuermann, Gerik; Hitzer, Eckhard
Demystification of the geometric Fourier transforms and resulting convolution theorems Journal Article
In: Mathematical Methods in the Applied Sciences, 2015.
@article{bujack2015demystification,
title = {Demystification of the geometric Fourier transforms and resulting convolution theorems},
author = {Roxana Bujack and Gerik Scheuermann and Eckhard Hitzer},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/DemystificationofthegeometricFouriertransformsandresultingconvolutiontheorems.pdf},
year = {2015},
date = {2015-01-01},
journal = {Mathematical Methods in the Applied Sciences},
publisher = {Wiley Online Library},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Kasten, Jens; Natarajan, Vijay; Scheuermann, Gerik; Joy, Kenneth
Clustering Moment Invariants to Identify Similarity within 2D Flow Fields Proceedings Article
In: Bertini, E.; Kennedy, J.; Puppo, E. (Ed.): Eurographics Conference on Visualization (EuroVis) - Short Papers, The Eurographics Association, 2015.
@inproceedings{eurovisshort.20151121,
title = {Clustering Moment Invariants to Identify Similarity within 2D Flow Fields},
author = {Roxana Bujack and Jens Kasten and Vijay Natarajan and Gerik Scheuermann and Kenneth Joy},
editor = {E. Bertini and J. Kennedy and E. Puppo},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/ClusteringMomentInvariantstoIdentifySimilaritywithin2DFlowFields.pdf},
doi = {10.2312/eurovisshort.20151121},
year = {2015},
date = {2015-01-01},
booktitle = {Eurographics Conference on Visualization (EuroVis) - Short Papers},
publisher = {The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Bujack, Roxana; Kasten, Jens; Hotz, Ingrid; Scheuermann, Gerik; Hitzer, Eckhard
Moment Invariants for 3D Flow Fields Using Normalization Conference
IEEE Pacific Visualization Symposium, PacificVis 2015 in Hangzhou, China, 2015.
@conference{BKHSH15,
title = {Moment Invariants for 3D Flow Fields Using Normalization},
author = {Roxana Bujack and Jens Kasten and Ingrid Hotz and Gerik Scheuermann and Eckhard Hitzer},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/MomentInvariantsfor3DFlowFieldsUsingNormalization.pdf},
year = {2015},
date = {2015-01-01},
booktitle = {IEEE Pacific Visualization Symposium, PacificVis 2015 in Hangzhou, China},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Bujack, Roxana; Hotz, Ingrid; Scheuermann, Gerik; Hitzer, Eckhard
Moment Invariants for 2D Flow Fields via Normalization in Detail Journal Article
In: IEEE Transactions on Visualization and Computer Graphics (TVCG), vol. 21, no. 8, pp. 916–929, 2015.
@article{BHSH15,
title = {Moment Invariants for 2D Flow Fields via Normalization in Detail},
author = {Roxana Bujack and Ingrid Hotz and Gerik Scheuermann and Eckhard Hitzer},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/MomentInvariantsfor2DFlowFieldsviaNormalizationinDetail.pdf},
doi = {10.1109/TVCG.2014.2369036},
year = {2015},
date = {2015-01-01},
journal = {IEEE Transactions on Visualization and Computer Graphics (TVCG)},
volume = {21},
number = {8},
pages = {916--929},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ahrens, James
Increasing Scientific Data Insights about Exascale Class Simulations under Power and Storage Constraints Journal Article
In: IEEE Computer Graphics and Applications, vol. 35, no. 2, pp. 8–11, 2015, ISSN: 0272-1716 (print), 1558-1756 (electronic), (LA-UR-pending).
Abstract | Links | BibTeX | Tags: exascale, increasing scientific data insights
@article{Ahrens:2015:ISD,
  title     = {Increasing Scientific Data Insights about Exascale Class Simulations under Power and Storage Constraints},
  author    = {James Ahrens},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2016/06/IncreasingScientificDataInsightsAboutExascaleClassSimulationsUnderPowerAndStorageConstrains.pdf},
  issn      = {0272-1716 (print), 1558-1756 (electronic)},
  year      = {2015},
  date      = {2015-01-01},
  journal   = {IEEE Computer Graphics and Applications},
  volume    = {35},
  number    = {2},
  pages     = {8--11},
  abstract  = {Over the past three decades, supercomputing systems have progressed to compute the results of extremely accurate scientific simulations. These simulations help us understand complex real-world phenomena such as our climate, energy sources, and the progression of natural disasters. Additionally, computing power supports the computation of higher-quality simulations, and that in turn provides higher fidelity results. Using the number of floating-point operations per second (flops) as a measure of progress, we have progressed through terascale machines that compute 10**12 flops to petascale machines that compute 10**15 flops. A number of open source efforts provide a robust scalable visualization and analysis capability such as ParaView (www.paraview.org) and Visit (https://visit.llnl.gov) for these levels of performance. These tools traditionally focus on a postprocessing approach. That is, during a simulation run, representative results are written to storage for later visualization. ...continued in full paper below.},
  note      = {LA-UR-pending},
  keywords  = {exascale, increasing scientific data insights},
  pubstate  = {published},
  tppubtype = {article}
}
2014
Sewell, Christopher
Streaming Data-Parallel Algorithms Enable Cosmology Data Analysis for Large Halos Presentation
31.12.2014, (LA-UR-14-29638).
Abstract | Links | BibTeX | Tags: cosmology, data parallel
@misc{Sewell2014,
  title     = {Streaming Data-Parallel Algorithms Enable Cosmology Data Analysis for Large Halos},
  author    = {Christopher Sewell},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Streaming_Data-Parallel_Algorithms_Enable_Cosmology_Data_Analysis_for_Large_Halos.pdf},
  year      = {2014},
  date      = {2014-12-31},
  abstract  = {This presentation given by Christopher Sewell describes how streaming data-parallel algorithms have enabled cosmology data analysis for large halos.},
  note      = {LA-UR-14-29638},
  keywords  = {cosmology, data parallel},
  pubstate  = {published},
  tppubtype = {presentation}
}
Ahrens, James; Jourdain, Sebastien; O'Leary, Patrick; Patchett, John; Rogers, David; Petersen, Mark
An Image-based Approach to Extreme Scale In Situ Visualization and Analysis Presentation
22.11.2014, (LA-UR-14-26864).
Abstract | Links | BibTeX | Tags: cinema, in situ
@misc{Ahrens2014,
  title     = {An Image-based Approach to Extreme Scale In Situ Visualization and Analysis},
  author    = {James Ahrens and Sebastien Jourdain and Patrick O'Leary and John Patchett and David Rogers and Mark Petersen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/ImageBasedApproachSC2014v3.pdf},
  year      = {2014},
  date      = {2014-11-22},
  abstract  = {This presentation given at SC14 by Los Alamos and Kitware scientists describes a new image based approach to extreme scale in-situ visualization and analysis.},
  note      = {LA-UR-14-26864},
  keywords  = {cinema, in situ},
  pubstate  = {published},
  tppubtype = {presentation}
}
Widanagamaachchi, Wathsala; Bremer, Peer-Timo; Sewell, Christopher; Lo, Li-ta; Ahrens, James; Pascucci, Valerio
Data-Parallel Halo Finding with Variable Linking Lengths Proceedings Article
In: 2014, (LA-UR-14-23700).
Abstract | Links | BibTeX | Tags: clustering, cosomology, halo
@inproceedings{Widanagamaachchi2014,
  title     = {Data-Parallel Halo Finding with Variable Linking Lengths},
  author    = {Wathsala Widanagamaachchi and Peer-Timo Bremer and Christopher Sewell and Li-ta Lo and James Ahrens and Valerio Pascucci},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Data-ParallelHaloFindingWithVariableLinkingLenghts.pdf},
  year      = {2014},
  date      = {2014-11-01},
  abstract  = {State-of-the-art cosmological simulations regularly contain billions of particles, providing scientists the opportunity to study the evolution of the Universe in great detail. However, the rate at which these simulations generate data severely taxes existing analysis techniques. Therefore, developing new scalable alternatives is essential for continued scientific progress. Here, we present a dataparallel, friends-of-friends halo finding algorithm that provides unprecedented flexibility in the analysis by extracting multiple linking lengths. Even for a single linking length, it is as fast as the existing techniques, and is portable to multi-threaded many-core systems as well as co-processing resources. Our system is implemented using PISTON and is coupled to an interactive analysis environment used to study halos at different linking lengths and track their evolution over time.},
  note      = {LA-UR-14-23700},
  keywords  = {clustering, cosomology, halo},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Kares, Robert J.
Experiments at Scale with In-Situ Visualization Using ParaView/Catalyst in RAGE Technical Report
2014, (LA-UR-14-28528).
Abstract | Links | BibTeX | Tags: catalyst, ParaView
@techreport{Kares2014,
  title       = {Experiments at Scale with In-Situ Visualization Using ParaView/Catalyst in RAGE},
  author      = {Robert J. Kares},
  url         = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ExperimentsAtScaleWithIn-SituVisualizationUsingParaViewCatalystInRAGE.pdf},
  institution = {Los Alamos National Laboratory (LANL)},
  year        = {2014},
  date        = {2014-10-14},
  abstract    = {In this paper I describe some numerical experiments performed using the ParaView/Catalyst in- situ visualization infrastructure deployed in the Los Alamos RAGE radiation-hydrodynamics code to produce images from a running large scale 3D ICF simulation on the Cielo supercomputer at Los Alamos. The detailed procedures for the creation of the visualizations using ParaView/Catalyst are discussed and several images sequences from the ICF simulation problem produced with the in-situ method are presented. My impressions and conclusions concerning the use of the in-situ visualization method in RAGE are discussed.},
  note        = {LA-UR-14-28528},
  keywords    = {catalyst, ParaView},
  pubstate    = {published},
  tppubtype   = {techreport}
}
Aldrich, Garrett; Gimenez, Alfredo; Oskin, Michael; Strelitz, Richard; Woodring, Jonathan; Kellogg, Louise H; Hamann, Bernd
Curvature-Based Crease Surfaces for Wave Visualization Proceedings Article
In: Citeseer, 2014, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: Visible line/surface algorithms
@inproceedings{Aldrich2014,
  title     = {Curvature-Based Crease Surfaces for Wave Visualization},
  author    = {Garrett Aldrich and Alfredo Gimenez and Michael Oskin and Richard Strelitz and Jonathan Woodring and Louise H Kellogg and Bernd Hamann},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Curvature-BasesCreaseSurfacesForWaveVisualization.pdf},
  year      = {2014},
  date      = {2014-10-08},
  publisher = {Citeseer},
  abstract  = {The visualization and analysis of complex fields often requires identifying and extracting domain specific fea- tures. Through a collaboration with geophysicists we extend previous work on crease surfaces with a new and complimentary definition: extremas in principal surface curvature rather than scalar value. Using this definition, we visualize the resulting surfaces which correspond to individual wave fronts. As these wave fronts propagate through a control structure (medium), they undergo changes in intensity, shape and topology due to reflection, refraction and interference. We demonstrate our ability to effectively visualize these phenomena in complex data sets including a large-scale simulation of a hypothetical earthquake along the San Andreas fault in Southern California.},
  note      = {LA-UR-pending},
  keywords  = {Visible line/surface algorithms},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Zeyen, Max
Material Science - Crystal Grain Visualization Presentation
01.10.2014, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: material science, visualization
@misc{Zeyen2014,
  title     = {Material Science - Crystal Grain Visualization},
  author    = {Max Zeyen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Material_Science-Crystal_Grain_Visualization.pdf},
  year      = {2014},
  date      = {2014-10-01},
  abstract  = {This presentation summarizes Max Zeyen's material science project on crystal grain visualization.},
  note      = {LA-UR-pending},
  keywords  = {material science, visualization},
  pubstate  = {published},
  tppubtype = {presentation}
}
Widanagamaachchi, Wathsala
In-situ Visualization and Analysis of Plasma Surface Interaction Simulations Presentation
01.10.2014, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: in situ, plasma-surface interactions, visualization
@misc{Widanagamaachchi2014b,
  title     = {In-situ Visualization and Analysis of Plasma Surface Interaction Simulations},
  author    = {Wathsala Widanagamaachchi},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/In-situ_Visualization_and_Analysis_Of_Plasma_Surface_Interaction_Simulations.pptx},
  year      = {2014},
  date      = {2014-10-01},
  abstract  = {This presentation summarized Wathsala Widanagamaachchi's in-situ visualization and analysis summer 2014 project.},
  note      = {LA-UR-pending},
  keywords  = {in situ, plasma-surface interactions, visualization},
  pubstate  = {published},
  tppubtype = {presentation}
}
Sewell, Christopher; Lo, Li-ta; Francois, Marianne; Ahrens, James
Data-Parallel Programming with PISTON and PINION Presentation
30.08.2014, (LA-UR-14-26186).
Abstract | Links | BibTeX | Tags: data parallel, PINION, PISTON
@misc{Sewell2014b,
  title     = {Data-Parallel Programming with PISTON and PINION},
  author    = {Christopher Sewell and Li-ta Lo and Marianne Francois and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Data-Parallel_Programming_with_PISTON_and_PINION.pdf},
  year      = {2014},
  date      = {2014-08-30},
  abstract  = {This presentation provides an introduction to data-parallel programming, NVIDIA's Thrust library, and our PISTON and PINION projects, which use this programming model to implement visualization and analysis operators, as well as simulation code.},
  note      = {LA-UR-14-26186},
  keywords  = {data parallel, PINION, PISTON},
  pubstate  = {published},
  tppubtype = {presentation}
}
Sewell, Christopher; Heitmann, Katrin; Lo, Li-Ta; Habib, Salman; Ahrens, James
Portable Parallel Halo and Center Finders for HACC Presentation
31.07.2014, (LA-UR-14-25437).
Abstract | Links | BibTeX | Tags: halo finding, PISTON, VTK-m
@misc{Sewell2014c,
  title     = {Portable Parallel Halo and Center Finders for HACC},
  author    = {Christopher Sewell and Katrin Heitmann and Li-Ta Lo and Salman Habib and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Portable_Parallel_Halo_and_Center_Finders_for_HACC.pdf},
  year      = {2014},
  date      = {2014-07-31},
  abstract  = {This presentation describes our work on finding halos and halo centers for the HACC cosmology code using our portable, data-parallel framework, which allows us to run on accelerators such as GPUs, providing significant speed-up. This work, which is part of the SDAV VTK-m project, enabled halo analysis to be performed on a very large data set (8192^3 particles across 16,384 nodes on Titan) for which analysis using the traditional CPU algorithms was not feasible.},
  note      = {LA-UR-14-25437},
  keywords  = {halo finding, PISTON, VTK-m},
  pubstate  = {published},
  tppubtype = {presentation}
}
Sewell, Christopher; Ahrens, James; Patchett, John
New Data-parallel Algorithms Accelerate Cosmology Data Analysis on GPUs Presentation
30.06.2014, (LA-UR-14-22054).
Abstract | Links | BibTeX | Tags: cosmo, cosmology, data parallel, gpu
@misc{Sewell2014d,
  title     = {New Data-parallel Algorithms Accelerate Cosmology Data Analysis on GPUs},
  author    = {Christopher Sewell and James Ahrens and John Patchett},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/New_Data-parallel_Algorithms_Accelerate_Cosmology_Data_Analysis_on_GPUs.pdf},
  year      = {2014},
  date      = {2014-06-30},
  abstract  = {This presentation describes how new data-parallel algorithms have accelerated cosmology data analysis on GPUs.},
  note      = {LA-UR-14-22054},
  keywords  = {cosmo, cosmology, data parallel, gpu},
  pubstate  = {published},
  tppubtype = {presentation}
}
Patchett, John
In Situ Presentation
17.06.2014, (LA-UR-14-24409).
Abstract | Links | BibTeX | Tags: in situ
@misc{Patchett2014,
  title     = {In Situ},
  author    = {John Patchett},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/In_Situ.pptx},
  year      = {2014},
  date      = {2014-06-17},
  abstract  = {Scientific visualization and analysis is an essential tool for understanding the large-scale data produced by high performance computer simulations. The Data Science at Scale Team at the Los Alamos National Laboratory (LANL) runs an extensive research and development program covering different computer science topics: graphics, systems, hardware, software, and algorithms to meet the analysis demands created by hero-class simulations and supercomputers. In particular, large-scale simulations produce results that strain storage and network capacity, moving scientific analysis away from post-processing. This requires domain scientists to adopt new analysis workflows that enable them to efficiently test scientific hypotheses from large-scale simulation results. This talk will review selected research topics within the LANL Data Science at Scale Team, with an emphasis on recent results in situ analysis and the delivery of data products in a production computing environment.},
  note      = {LA-UR-14-24409},
  keywords  = {in situ},
  pubstate  = {published},
  tppubtype = {presentation}
}
Ahrens, James
Increasing Scientific Data Insights About Exascale Class Simulations Under Power and Storage Constraints Presentation
28.02.2014, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: exascale
@misc{Ahrens2014b,
  title     = {Increasing Scientific Data Insights About Exascale Class Simulations Under Power and Storage Constraints},
  author    = {James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/IncreasingScientificDataInsightsAboutExascaleClassSimulationsUnderPowerAndStorageConstrains.pdf
               http://www.exascale.org/bdec/},
  year      = {2014},
  date      = {2014-02-28},
  abstract  = {James Ahrens gave an invited talk and led data sessions at the Big Data and Extreme-Scale Computing Conference in Fukuoka, Japan from Feb. 26-28, 2014. The title of Jim's talk was 'Increasing Scientific Data Insights About Exascale Class Simulations Under Power and Storage Constraints'. Jim also described the talk in a podcast interview with HPCWire.},
  note      = {LA-UR-pending},
  keywords  = {exascale},
  pubstate  = {published},
  tppubtype = {presentation}
}
Sewell, Christopher; Lo, Li-ta; Ahrens, James
PISTON Presentation
31.01.2014, (LA-UR-14-20028).
Abstract | Links | BibTeX | Tags: data parallel, PINION, PISTON
@misc{Sewell2014e,
  title     = {PISTON},
  author    = {Christopher Sewell and Li-ta Lo and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/PISTON.pdf},
  year      = {2014},
  date      = {2014-01-31},
  abstract  = {This set of two guest lectures provides an introductory tutorial to data-parallel programming and NVIDIA's Thrust library, as well as an overview of our research in our PISTON and PINION projects.},
  note      = {LA-UR-14-20028},
  keywords  = {data parallel, PINION, PISTON},
  pubstate  = {published},
  tppubtype = {presentation}
}
Nouanesengsy, Boonthanome; Woodring, Jonathan; Patchett, John; Myers, Kary; Ahrens, James
ADR visualization: A generalized framework for ranking large-scale scientific data using Analysis-Driven Refinement Proceedings Article
In: Large Data Analysis and Visualization (LDAV), 2014 IEEE 4th Symposium on, pp. 43–50, IEEE 2014, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: adaptive mesh refinement, ADR, Analysis-Driven Refinement, big data, data triage, focus+context, hardware architecture, large-scale data, parallel processing, picture/image generation, prioritization, scientific data, viewing algorithms
@inproceedings{nouanesengsy2014adr,
  title        = {ADR visualization: A generalized framework for ranking large-scale scientific data using Analysis-Driven Refinement},
  author       = {Boonthanome Nouanesengsy and Jonathan Woodring and John Patchett and Kary Myers and James Ahrens},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ADRVisualization.pdf},
  year         = {2014},
  date         = {2014-01-01},
  booktitle    = {Large Data Analysis and Visualization (LDAV), 2014 IEEE 4th Symposium on},
  pages        = {43--50},
  organization = {IEEE},
  abstract     = {Prioritization of data is necessary for managing large-scale scien- tific data, as the scale of the data implies that there are only enough resources available to process a limited subset of the data. For ex- ample, data prioritization is used during in situ triage to scale with bandwidth bottlenecks, and used during focus+context visualiza- tion to save time during analysis by guiding the user to impor- tant information. In this paper, we present ADR visualization, a generalized analysis framework for ranking large-scale data using Analysis-Driven Refinement (ADR), which is inspired by Adaptive Mesh Refinement (AMR). A large-scale data set is partitioned in space, time, and variable, using user-defined importance measure- ments for prioritization. This process creates a prioritization tree over the data set. Using this tree, selection methods can generate sparse data products for analysis, such as focus+context visualiza- tions or sparse data sets.},
  note         = {LA-UR-pending},
  keywords     = {adaptive mesh refinement, ADR, Analysis-Driven Refinement, big data, data triage, focus+context, hardware architecture, large-scale data, parallel processing, picture/image generation, prioritization, scientific data, viewing algorithms},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Ahrens, James; Jourdain, Sebastien; O'Leary, Patrick; Patchett, John; Rogers, David; Petersen, Mark
An Image-based Approach to Extreme Scale in Situ Visualization and Analysis Proceedings Article
In: Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis, pp. 424–434, IEEE Press, New Orleans, Louisiana, 2014, ISBN: 978-1-4799-5500-8, (LA-UR-14-26864).
Abstract | Links | BibTeX | Tags: analysis, cinema, cinemascience, image-based, in-situ, visualization
@inproceedings{Ahrens:2014:IAE:2683593.2683640,
  title     = {An Image-based Approach to Extreme Scale in Situ Visualization and Analysis},
  author    = {James Ahrens and Sebastien Jourdain and Patrick O'Leary and John Patchett and David Rogers and Mark Petersen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AnImage-basedApproachToExtremeScaleInSituvisualizationAndAnalysis.pdf
               http://dx.doi.org/10.1109/SC.2014.40},
  doi       = {10.1109/SC.2014.40},
  isbn      = {978-1-4799-5500-8},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis},
  pages     = {424--434},
  publisher = {IEEE Press},
  address   = {New Orleans, Louisiana},
  series    = {SC '14},
  abstract  = {Extreme scale scientific simulations are leading a charge to exascale computation, and data analytics runs the risk of being a bottleneck to scientific discovery. Due to power and I/O constraints, we expect in situ visualization and analysis will be a critical component of these workflows. Options for extreme scale data analysis are often presented as a stark contrast: write large files to disk for interactive, exploratory analysis, or perform in situ analysis to save detailed data about phenomena that a scientists knows about in advance. We present a novel framework for a third option - a highly interactive, image-based approach that promotes exploration of simulation results, and is easily accessed through extensions to widely used open source tools. This in situ approach supports interactive exploration of a wide range of results, while still significantly reducing data movement and storage.},
  note      = {LA-UR-14-26864},
  keywords  = {analysis, cinema, cinemascience, image-based, in-situ, visualization},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Su, Yu; Agrawal, Gagan; Woodring, Jonathan; Myers, Kary; Wendelberger, Joanne; Ahrens, James
Effective and efficient data sampling using bitmap indices Journal Article
In: Cluster Computing, pp. 1-20, 2014, ISSN: 1386-7857, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: big data, Bitmap indexing, data sampling, Multi-resolution, parallel processing
@article{su2014effective,
  title     = {Effective and efficient data sampling using bitmap indices},
  author    = {Yu Su and Gagan Agrawal and Jonathan Woodring and Kary Myers and Joanne Wendelberger and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/EffectiveAndEfficientDataSamplingUsingBitmapIndeces.pdf},
  doi       = {10.1007/s10586-014-0360-5},
  issn      = {1386-7857},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {Cluster Computing},
  pages     = {1-20},
  publisher = {Springer US},
  abstract  = {With growing computational capabilities of parallel machines, scientific simulations are being performed at finer spatial and temporal scales, leading to a data explosion. The growing sizes are making it extremely hard to store, manage, disseminate, analyze, and visualize these datasets, especially as neither the memory capacity of parallel machines, memory access speeds, nor disk bandwidths are increasing at the same rate as the computing power. Sampling can be an effective technique to address the above challenges, but it is extremely important to ensure that dataset characteristics are preserved, and the loss of accuracy is within acceptable levels. In this paper, we address the data explosion problems by developing a novel sampling approach, and implementing it in a flexible system that supports server-side sampling and data subsetting.We observe that to allowsubsetting over scientific datasets, data repositories are likely to use an indexing technique. Among these techniques, we see that bitmap indexing can not only effectively support subsetting over scientific datasets, but can also help create samples that preserve both value and spatial distributions over scientific datasets. We have developed algorithms for using bitmap indices to sample datasets. We have also shown how only a small amount of additional metadata stored with bitvectors can help assess loss of accuracy with a particular subsampling level. Some of the other properties of this novel approach include: (1) sampling can be flexibly applied to a subset of the original dataset, which may be specified using a valuebased and/or a dimension-based subsetting predicate, and (2) no data reorganization is needed, once bitmap indices have been generated. We have extensively evaluated our method with different types of datasets and applications, and demonstrated the effectiveness of our approach.},
  note      = {LA-UR-pending},
  keywords  = {big data, Bitmap indexing, data sampling, Multi-resolution, parallel processing},
  pubstate  = {published},
  tppubtype = {article}
}
Childs, Hank
Research challenges for visualization software Technical Report
2014.
Abstract | Links | BibTeX | Tags: visualization
@techreport{Childs2014,
  title     = {Research challenges for visualization software},
  author    = {Hank Childs},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ResearchChallengesForVisualizationSoftware.pdf},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {IEEE Computer, 46, May 2013},
  abstract  = {As the visualization research community reorients its software to address upcoming challenges, it must successfully deal with diverse processor architectures, distributed systems, various data sources, massive parallelism, multiple input and output devices, and interactivity.},
  keywords  = {visualization},
  pubstate  = {published},
  tppubtype = {techreport}
}
Francois, Marianne; Lo, Li-Ta; Sewell, Christopher
Volume-of-Fluid Interface Reconstruction Algorithms on Next-Generation Computer Architectures Proceedings Article
In: Proceedings of the ASME, 2014, (LA-UR-14-20777).
Abstract | Links | BibTeX | Tags: COMPUTER ARCHITECTURES, INTERFACE RECONSTRUCTION, PISTON
@inproceedings{francois2014volume,
  title     = {Volume-of-Fluid Interface Reconstruction Algorithms on Next-Generation Computer Architectures},
  author    = {Marianne Francois and Li-Ta Lo and Christopher Sewell},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Volume-Of-FluidInterfaceReconstructionAlgorighmsOnNext-GenerationComputerArchitectures.pdf},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {Proceedings of the ASME},
  abstract  = {With the increasing heterogeneity and on-node parallelism of high-performance computing hardware, a major challenge to computational physicists is to work in close collaboration with computer scientists to develop portable and efficient algorithms and software. The objective of our work is to implement a portable code to perform interface reconstruction using NVIDIA’s Thrust library. Interface reconstruction is a technique commonly used in volume tracking methods for simulations of interfacial flows. For that, we have designed a two-dimensional mesh data structure that is easily mapped to the 1D vectors used by Thrust and at the same time is simple to work with using familiar data structures terminology (such as cell, vertices and edges). With this new data structure in place, we have implemented a recursive volume-of-fluid initialization algorithm and a standard piecewise interface reconstruction algorithm. Our interface reconstruction algorithm makes use of a table look-up to easily identify all intersection cases, as this design is efficient on parallel architectures such as GPUs. Finally, we report performance results which show that a single implementation of these algorithms can be compiled to multiple backends (specifically, multi-core CPUs, NVIDIA GPUs, and Intel Xeon Phi coprocessors), making efficient use of the available parallelism on each.},
  note      = {LA-UR-14-20777},
  keywords  = {COMPUTER ARCHITECTURES, INTERFACE RECONSTRUCTION, PISTON},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Sewell, Christopher; Ahrens, James; Patchett, John
SDAV Visualization Area: Highlights at Los Alamos with HACC Technical Report
Los Alamos National Laboratory (LANL) 2014, (LA-UR-14-22054).
Links | BibTeX | Tags: visualization
@techreport{sewell2014sdav,
  title       = {SDAV Visualization Area: Highlights at Los Alamos with HACC},
  author      = {Christopher Sewell and James Ahrens and John Patchett},
  url         = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-14-22054.pdf},
  year        = {2014},
  date        = {2014-01-01},
  institution = {Los Alamos National Laboratory (LANL)},
  note        = {LA-UR-14-22054},
  keywords    = {visualization},
  pubstate    = {published},
  tppubtype   = {techreport}
}
Bujack, Roxana
Orientation Invariant Pattern Detection in Vector Fields with Clifford Algebra and Moment Invariants PhD Thesis
Department of Computer Science, Leipzig University, Germany, 2014.
@phdthesis{Buj14,
  title     = {Orientation Invariant Pattern Detection in Vector Fields with Clifford Algebra and Moment Invariants},
  author    = {Roxana Bujack},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/OrientationInvariantPatternDetectioninVectorFieldswithCliffordAlgebraandMomentInvariants.pdf},
  year      = {2014},
  date      = {2014-01-01},
  school    = {Department of Computer Science, Leipzig University, Germany},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {phdthesis}
}
Bujack, Roxana; Kasten, Jens; Hotz, Ingrid; Scheuermann, Gerik; Hitzer, Eckhard
Moment Invariants for 3D Flow Fields Miscellaneous
2014.
@misc{BKHSH14,
  title     = {Moment Invariants for 3D Flow Fields},
  author    = {Roxana Bujack and Jens Kasten and Ingrid Hotz and Gerik Scheuermann and Eckhard Hitzer},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/MomentInvariantsfor3DFlowFields.pdf},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {poster at IEEE VIS 2014 in Paris, France},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {misc}
}
Bujack, Roxana; Hotz, Ingrid; Scheuermann, Gerik; Hitzer, Eckhard
Moment Invariants for 2D Flow Fields via Normalization Conference
IEEE Pacific Visualization Symposium, PacificVis 2014 in Yokohama, Japan, 2014.
@conference{BHSH14a,
  title     = {Moment Invariants for 2D Flow Fields via Normalization},
  author    = {Roxana Bujack and Ingrid Hotz and Gerik Scheuermann and Eckhard Hitzer},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/MomentInvariantsfor2DFlowFieldsviaNormalization.pdf},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {IEEE Pacific Visualization Symposium, PacificVis 2014 in Yokohama, Japan},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Bujack, Roxana; Hlawitschka, Mario; Scheuermann, Gerik; Hitzer, Eckhard
Customized TRS Invariants for 2D Vector Fields via Moment Normalization Journal Article
In: Pattern Recognition Letters, vol. 46, pp. 59, 2014.
@article{BHSH14b,
  title     = {Customized TRS Invariants for 2D Vector Fields via Moment Normalization},
  author    = {Roxana Bujack and Mario Hlawitschka and Gerik Scheuermann and Eckhard Hitzer},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/CustomizedTRSInvariantsfor2DVectorFieldsviaMomentNormalization.pdf},
  year      = {2014},
  date      = {2014-01-01},
  journal   = {Pattern Recognition Letters},
  volume    = {46},
  pages     = {59},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2013
Nouanesengsy, Boonthanome; Patchett, John; Ahrens, James; Bauer, Andrew; Chaudhary, Aashish; Geveci, Berk; Miller, Ross; Shipman, Galen; Williams, Dean N
Optimizing File Access Patterns through the Spatio-Temporal Pipeline for Parallel Visualization and Analysis Technical Report
2013, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: Data Analysis, I/O, Modeling, Parallel Analysis, Parallel Techniques, Parallel Visualization, Spatio-Temporal Pipeline, visualization
@techreport{Nouanesengsy2013,
title = {Optimizing File Access Patterns through the Spatio-Temporal Pipeline for Parallel Visualization and Analysis},
author = {Boonthanome Nouanesengsy and John Patchett and James Ahrens and Andrew Bauer and Aashish Chaudhary and Berk Geveci and Ross Miller and Galen Shipman and Dean N Williams},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/OptimizingFileAccessPatternsThroughTheSpatio-TemporalPipelineForParallelVisualizationAndAnalysis.pdf},
institution = {Los Alamos National Laboratory},
year = {2013},
date = {2013-10-13},
abstract = {As computational resources have become more powerful over time, availability of large-scale data has exploded, with datasets greatly increasing their spatial and temporal resolutions. For many years now, I/O read time has been recognized as the primary bottleneck for parallel visualization and analysis of large-scale data. Read times ultimately depends on how the file is stored and the file access pattern used to read the file. In this paper, we introduce a model which can estimate the read time for a file stored in a parallel filesystem when given the file access pattern. The type of parallel decomposition used directly dictates what the file access pattern will be. The spatio-temporal pipeline is used to give greater flexibility to the file access pattern used. The spatio-temporal pipeline combines both spatial and temporal parallelism to create a parallel decomposition for a task. Within the spatio-temporal pipeline, all available processes are divided into groups called time compartments. Temporal parallelism is utilized as different timesteps are independently processed by separate time compartments, and spatial parallelism is used to divide each timestep over all processes within a time compartment. The ratio between spatial and temporal parallelism is controlled by adjusting the size of a time compartment. Using the model, we were able to configure the spatio-temporal pipeline to create optimized read access patterns, resulting in a speedup factor of approximately 400 over traditional file access patterns.},
note = {LA-UR-pending},
keywords = {Data Analysis, I/O, Modeling, Parallel Analysis, Parallel Techniques, Parallel Visualization, Spatio-Temporal Pipeline, visualization},
pubstate = {published},
tppubtype = {techreport}
}
Lo, Li-Ta
Visualizing PSI Simulation with ParaView Presentation
04.10.2013, (LA-UR-13-27631).
Abstract | Links | BibTeX | Tags: ParaView, Plasma Surface Interaction
@misc{Lo2013,
  title     = {Visualizing PSI Simulation with ParaView},
  author    = {Li-Ta Lo},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Visualizing_PSI_Simulation_with_ParaView.pdf
http://datascience.dsscale.org/wp-content/uploads/2016/08/freeZ5400all.dump
http://datascience.dsscale.org/wp-content/uploads/2016/08/freeZ5300all.dump},
  year      = {2013},
  date      = {2013-10-04},
  abstract  = {This presentation describes the use of ParaView for a Plasma Surface Interaction (PSI) simulation. Two sample datasets from the Large-scale Atomic/Molecular Massively Parallel Simulator (LAMMPS) are included below.},
  note      = {LA-UR-13-27631},
  keywords  = {ParaView, Plasma Surface Interaction},
  pubstate  = {published},
  tppubtype = {presentation}
}
Patchett, John; Ahrens, James; Nouanesengsy, Boonthanome; Fasel, Patricia; O'Leary, Patrick; Sewell, Christopher; Woodring, Jonathan; Mitchell, Christopher; Lo, Li-Ta; Myers, Kary; Wendelberger, Joanne; Canada, Curt; Daniels, Marcus; Abhold, Hilary; Rockefeller, Gabe
Case Study of In Situ Data Analysis in ASC Integrated Codes Presentation
04.09.2013, (LA-UR-13-26599).
Abstract | Links | BibTeX | Tags: in situ
@misc{Patchett2013,
title = {Case Study of In Situ Data Analysis in ASC Integrated Codes},
author = {John Patchett and James Ahrens and Boonthanome Nouanesengsy and Patricia Fasel and Patrick O'Leary and Christopher Sewell and Jonathan Woodring and Christopher Mitchell and Li-Ta Lo and Kary Myers and Joanne Wendelberger and Curt Canada and Marcus Daniels and Hilary Abhold and Gabe Rockefeller},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/LANLCSSEL2CaseStudyOfInSituDataAnalysisInASCIntegratedCodes.pdf},
year = {2013},
date = {2013-09-04},
abstract = {This talk presents the overview of the 2013 CSSE ASC L2 Milestone: Case Study of In Situ Data Analysis in ASC Integrated Codes. The talk has 3 parts: An introduction to in situ analysis/visualization, a demonstration of paraview catalyst applied to xRage with timings, and a detail of the minor deliverables from the description of the milestone.},
note = {LA-UR-13-26599},
keywords = {in situ},
pubstate = {published},
tppubtype = {presentation}
}
Brislawn, Christopher M.
Group-theoretic structure of linear phase multirate filter banks Journal Article
In: IEEE Transactions on Information Theory, vol. 59, no. 9, 2013, (LA-UR-12-20858).
Abstract | Links | BibTeX | Tags: Filter bank, free product, group, group lifting structure, JPEG 2000, Lifting, Linear phase filter, Polyphase matrix, semidirect product, Unique factorization, Wavelet
@article{Bris:13b:Group-theoretic-structure,
title = {Group-theoretic structure of linear phase multirate filter banks},
author = {Christopher M. Brislawn},
url = {https://datascience.dsscale.org/wp-content/uploads/2016/06/Group-TheoreticStructureOfLinearPhaseMultirateFilterBanks.pdf},
year = {2013},
date = {2013-08-06},
journal = {IEEE Transactions on Information Theory},
volume = {59},
number = {9},
abstract = {Unique lifting factorization results for group lifting structures are used to characterize the group-theoretic structure of two-channel linear phase FIR perfect reconstruction filter bank groups. For D-invariant, order-increasing group lifting structures, it is shown that the associated lifting cascade group C is isomorphic to the free product of the upper and lower triangular lifting matrix groups. Under the same hypotheses, the associated scaled lifting group S is the semidirect product of C by the diagonal gain scaling matrix group D. These results apply to the group lifting structures for the two principal classes of linear phase perfect reconstruction filter banks, the whole- and half-sample symmetric classes. Since the unimodular whole-sample symmetric class forms a group, W, that is in fact equal to its own scaled lifting group, W = SW, the results of this paper characterize the group-theoretic structure of W up to isomorphism. Although the half-sample symmetric class H does not form a group, it can be partitioned into cosets of its lifting cascade group, CH, or, alternatively, into cosets of its scaled lifting group, SH. Homomorphic comparisons reveal that scaled lifting groups covered by the results in this paper have a structure analogous to a 'noncommutative vector space.'},
note = {LA-UR-12-20858},
keywords = {Filter bank, free product, group, group lifting structure, JPEG 2000, Lifting, Linear phase filter, Polyphase matrix, semidirect product, Unique factorization, Wavelet},
pubstate = {published},
tppubtype = {article}
}
Ahrens, James; Sewell, Christopher
Numerically Intensive and Data Intensive Computing: Issues, Approaches and Leveraging Presentation
05.08.2013, (LA-UR-13-26190).
Abstract | Links | BibTeX | Tags: Data Intensive Computing, Numerically Intensive Computing
@misc{Ahrens2013,
title = {Numerically Intensive and Data Intensive Computing: Issues, Approaches and Leveraging},
author = {James Ahrens and Christopher Sewell},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/NumericallyIntensiveAndDataIntensiveComputingIssuesApproachesAndLeveraging.pdf},
year = {2013},
date = {2013-08-05},
abstract = {This presentation discusses the impact of the technologies developed for numerically intensive and exascale computing on data-intensive computing and the broader industrial computing infrastructure.},
note = {LA-UR-13-26190},
keywords = {Data Intensive Computing, Numerically Intensive Computing},
pubstate = {published},
tppubtype = {presentation}
}
Sewell, Christopher
Portable Data-Parallel Visualization and Analysis Operators Presentation
20.03.2013, (LA-UR-13-21884).
Abstract | Links | BibTeX | Tags: data parallel, gpu, PINION, PISTON
@misc{Sewell2013,
  title     = {Portable Data-Parallel Visualization and Analysis Operators},
  author    = {Christopher Sewell},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/PortableDataParallelVisualizationAndAnalysisOperators.pdf},
  year      = {2013},
  date      = {2013-03-20},
  abstract  = {This presentation describes the overall goal of PISTON and PINION (to provide high parallel performance on current and next-generation supercomputers using portable, data-parallel code), and summarizes the work on these projects to date. It is intended for an audience at NVIDIA's GPU Technology Conference, and thus has an emphasis on how it uses Thrust to write code that obtains good parallel performance when compiled to different backends, including CUDA.},
  note      = {LA-UR-13-21884},
  keywords  = {data parallel, gpu, PINION, PISTON},
  pubstate  = {published},
  tppubtype = {presentation}
}
Sewell, Christopher; Lo, Li-Ta; Ahrens, James
PISTON: An SDAV Framework for Portable High-Performance Data-Parallel Visualization and Analysis Operators Presentation
22.02.2013, (LA-UR-13-21083).
Abstract | Links | BibTeX | Tags: PISTON, VTK-m
@misc{Sewell2013b,
title = {PISTON: An SDAV Framework for Portable High-Performance Data-Parallel Visualization and Analysis Operators},
author = {Christopher Sewell and Li-Ta Lo and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/08/PISTON2.pdf},
year = {2013},
date = {2013-02-22},
abstract = {This presentation describes the overall goal of PISTON (to provide portability and performance for visualization and analysis operators on current and next-generation supercomputers), and summarizes the work on PISTON in relation to the SDAV (The SciDac Institute of Scalable Data Management, Analysis, and Visualization) Milestones. Specifically, it presents work related to general PISTON algorithm and infrastructure development; the halo finder operator; PISTON integration into VTK and ParaView; VPIC in-situ PISTON pipelines; and publications, presentations, and tutorials.},
note = {LA-UR-13-21083},
keywords = {PISTON, VTK-m},
pubstate = {published},
tppubtype = {presentation}
}
Patchett, John
Applications of In Situ Visualization for Ocean, Cosmology, and Plasma Presentation
20.02.2013, (LA-UR-13-21112).
Abstract | Links | BibTeX | Tags: cosmology, in situ, oceanography simulation and modeling, plasma
@misc{Patchett2013b,
  title     = {Applications of In Situ Visualization for Ocean, Cosmology, and Plasma},
  author    = {John Patchett},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/08/Applications_of_In_Situ_Visualization_for_Ocean_Cosmology_and_Plasma.pdf},
  year      = {2013},
  date      = {2013-02-20},
  abstract  = {This is a five minute or less talk for the Office of Science SDAV All Hands Meeting on 2/20/2013. It describes our work with three domains of science: ocean modeling (POP), cosmology(HACC), and plasma(VPIC). In particular it presents work that was directly related to in situ analysis and our future work with these models under SDAV.},
  note      = {LA-UR-13-21112},
  keywords  = {cosmology, in situ, oceanography simulation and modeling, plasma},
  pubstate  = {published},
  tppubtype = {presentation}
}
Nouanesengsy, Boonthanome; Patchett, John; Ahrens, James; Bauer, Andrew; Chaudhary, Aashish; Miller, Ross; Geveci, Berk; Shipman, Galen; Williams, Dean N
A model for optimizing file access patterns using spatio-temporal parallelism Proceedings Article
In: Proceedings of the 8th International Workshop on Ultrascale Visualization, pp. 4, ACM 2013, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: Data Analysis, file access, I/O, Modeling, Modeling techniques, optimizing, parallel programming, Parallel Techniques, spatio-temporal parallelism, visualization
@inproceedings{nouanesengsy2013model,
title = {A model for optimizing file access patterns using spatio-temporal parallelism},
author = {Boonthanome Nouanesengsy and John Patchett and James Ahrens and Andrew Bauer and Aashish Chaudhary and Ross Miller and Berk Geveci and Galen Shipman and Dean N Williams},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AModelForOptimizingFileAccessPatternsUsingSpatio-TemporalParallelism.pdf},
year = {2013},
date = {2013-01-01},
booktitle = {Proceedings of the 8th International Workshop on Ultrascale Visualization},
pages = {4},
organization = {ACM},
abstract = {For many years now, I/O read time has been recognized as the primary bottleneck for parallel visualization and analysis of large-scale data. In this paper, we introduce a model that can estimate the read time for a file stored in a parallel filesystem when given the file access pattern. Read times ultimately depend on how the file is stored and the access pattern used to read the file. The file access pattern will be dictated by the type of parallel decomposition used. We employ spatio-temporal parallelism, which combines both spatial and temporal parallelism, to provide greater flexibility to possible file access patterns. Using our model, we were able to configure the spatio-temporal parallelism to design optimized read access patterns that resulted in a speedup factor of approximately 400 over traditional file access patterns.},
note = {LA-UR-pending},
keywords = {Data Analysis, file access, I/O, Modeling, Modeling techniques, optimizing, parallel programming, Parallel Techniques, spatio-temporal parallelism, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Woodring, Jonathan; Ahrens, James; Tautges, Timothy J; Peterka, Tom; Vishwanath, Venkatram; Geveci, Berk
On-demand unstructured mesh translation for reducing memory pressure during in situ analysis Proceedings Article
In: Proceedings of the 8th International Workshop on Ultrascale Visualization, pp. 3, ACM 2013, (LA-UR-13-27909).
Abstract | Links | BibTeX | Tags: in-situ, memory pressure, mesh translation, on-demand, unstructured mesh
@inproceedings{woodring2013demand,
title = {On-demand unstructured mesh translation for reducing memory pressure during in situ analysis},
author = {Jonathan Woodring and James Ahrens and Timothy J Tautges and Tom Peterka and Venkatram Vishwanath and Berk Geveci},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/On-DemandUnstructuredMeshTranslationForReducingMemoryPressureDuringInSituAnalysis.pdf},
year = {2013},
date = {2013-01-01},
booktitle = {Proceedings of the 8th International Workshop on Ultrascale Visualization},
pages = {3},
organization = {ACM},
abstract = {When coupling two different mesh-based codes, for example with in situ analytics, the typical strategy is to explicitly copy data (deep copy) from one implementation to another, doing translation in the process. This is necessary because codes usually do not share data model interfaces or implementations. The drawback is that data duplication results in an increased memory footprint for the coupled code. An alternative strategy, which we study in this paper, is to share mesh data through on-demand, fine-grained, run-time data model translation. This saves memory, which is an increasingly scarce resource at exascale, for the increased use of in situ analysis and decreasing memory per core. We study the performance of our method compared against a deep copy with in situ analysis at scale.},
note = {LA-UR-13-27909},
keywords = {in-situ, memory pressure, mesh translation, on-demand, unstructured mesh},
pubstate = {published},
tppubtype = {inproceedings}
}
Brislawn, Christopher M.
On the group-theoretic structure of lifted filter banks Book Chapter
In: Andrews, Travis; Balan, Radu; Benedetto, John; Czaja, Wojciech; Okoudjou, Kasso (Ed.): Excursions in Harmonic Analysis, vol. 2, pp. 113-135, Birkhäuser, Basel, 2013, (LA-UR-12-21217).
Abstract | Links | BibTeX | Tags: Filter bank, Group lifting structure, Group theory, group-theoretic structure, JPEG 2000, lifted filter banks, Lifting, Linear phase filter, Matrix polynomial, Polyphase matrix, Unique factorization, Wavelet
@inbook{Brislawn2013,
title = {On the group-theoretic structure of lifted filter banks},
author = {Christopher M. Brislawn},
editor = {Travis Andrews and Radu Balan and John Benedetto and Wojciech Czaja and Kasso Okoudjou},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/OnTheGroup-TheoreticStructureOfLiftedFilterBanks.pdf},
doi = {10.1007/978-0-8176-8379-5_6},
year = {2013},
date = {2013-01-01},
booktitle = {Excursions in Harmonic Analysis, vol.~2},
pages = {113--135},
publisher = {Birkh{\"a}user},
address = {Basel},
series = {Applied and Numerical Harmonic Analysis},
abstract = {The polyphase-with-advance matrix representations of whole-sample symmetric (WS) unimodular filter banks form a multiplicative matrix Laurent polynomial group. Elements of this group can always be factored into lifting matrices with half-sample symmetric (HS) off-diagonal lifting filters; such linear phase lifting factorizations are specified in the ISO/IEC JPEG 2000 image coding standard. Half-sample symmetric unimodular filter banks do not form a group, but such filter banks can be partially factored into a cascade of whole-sample antisymmetric (WA) lifting matrices starting from a concentric, equal-length HS base filter bank. An algebraic framework called a group lifting structure has been introduced to formalize the group-theoretic aspects of matrix lifting factorizations. Despite their pronounced differences, it has been shown that the group lifting structures for both the WS and HS classes satisfy a polyphase order-increasing property that implies uniqueness (“modulo rescaling”) of irreducible group lifting factorizations in both group lifting structures. These unique factorization results can in turn be used to characterize the group-theoretic structure of the groups generated by the WS and HS group lifting structures.},
note = {LA-UR-12-21217},
keywords = {Filter bank, Group lifting structure, Group theory, group-theoretic structure, JPEG 2000, lifted filter banks, Lifting, Linear phase filter, Matrix polynomial, Polyphase matrix, Unique factorization, Wavelet},
pubstate = {published},
tppubtype = {inbook}
}
Sewell, Christopher; Lo, Li-Ta; Ahrens, James
Portable data-parallel visualization and analysis in distributed memory environments Proceedings Article
In: Large-Scale Data Analysis and Visualization (LDAV), 2013 IEEE Symposium on, pp. 25–33, IEEE 2013, (LA-UR-13-23809).
Abstract | Links | BibTeX | Tags: analysis, Concurrent Programming, data-parallel, distributed memory, parallel programming, PISTON, visualization
@inproceedings{sewell2013portable,
title = {Portable data-parallel visualization and analysis in distributed memory environments},
author = {Christopher Sewell and Li-Ta Lo and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/PortableData-ParallelVisualizationAndAnalysisInDistributedMemoryEnvironments.pdf},
year = {2013},
date = {2013-01-01},
booktitle = {Large-Scale Data Analysis and Visualization (LDAV), 2013 IEEE Symposium on},
pages = {25--33},
organization = {IEEE},
abstract = {Data-parallelism is a programming model that maps well to architectures with a high degree of concurrency. Algorithms written using data-parallel primitives can be easily ported to any architecture for which an implementation of these primitives exists, making efficient use of the available parallelism on each. We have previously published results demonstrating our ability to compile the same data-parallel code for several visualization algorithms onto different on-node parallel architectures (GPUs and multi-core CPUs) using our extension of NVIDIA's Thrust library. In this paper, we discuss our extension of Thrust to support concurrency in distributed memory environments across multiple nodes. This enables the application developer to write data-parallel algorithms while viewing the data as single, long vectors, essentially without needing to explicitly take into consideration whether the values are actually distributed across nodes. Our distributed wrapper for Thrust handles the communication in the backend using MPI, while still using the standard Thrust library to take advantage of available on-node parallelism. We describe the details of our distributed implementations of several key data-parallel primitives, including scan, scatter/ gather, sort, reduce, and upper/lower bound. We also present two higher-level distributed algorithms developed using these primitives: isosurface and KD-tree construction. Finally, we provide timing results demonstrating the ability of these algorithms to take advantage of available parallelism on nodes and across multiple nodes, and discuss scaling limitations for communication-intensive algorithms such as KD-tree construction.},
note = {LA-UR-13-23809},
keywords = {analysis, Concurrent Programming, data-parallel, distributed memory, parallel programming, PISTON, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Su, Yu; Agrawal, Gagan; Woodring, Jonathan; Myers, Kary; Wendelberger, Joanne; Ahrens, James
Taming massive distributed datasets: data sampling using bitmap indices Proceedings Article
In: Proceedings of the 22nd international symposium on High-performance parallel and distributed computing, pp. 13–24, ACM 2013, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: bitmap indices, database applications, database management, performance
@inproceedings{su2013taming,
  title        = {Taming massive distributed datasets: data sampling using bitmap indices},
  author       = {Yu Su and Gagan Agrawal and Jonathan Woodring and Kary Myers and Joanne Wendelberger and James Ahrens},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/TamingMassiveDistributedDatasetsDataSamplingUsingBitmapIndices.pdf},
  year         = {2013},
  date         = {2013-01-01},
  booktitle    = {Proceedings of the 22nd international symposium on High-performance parallel and distributed computing},
  pages        = {13--24},
  organization = {ACM},
  abstract     = {In this paper, we address the data explosion problems by developing a novel sampling approach, and implementing it in a flexible system that supports server-side sampling and data subsetting. We observe that to allow subsetting over scientific datasets, data repositories are likely to use an indexing technique. Among these techniques, we see that bitmap indexing can not only effectively support subsetting over scientific datasets, but can also help create samples that preserve both value and spatial distributions over scientific datasets. We have developed algorithms for using bitmap indices to sample datasets. We have also shown how only a small amount of additional metadata stored with bitvectors can help assess loss of accuracy with a particular subsampling level. Some of the other properties of this novel approach include: 1) sampling can be flexibly applied to a subset of the original dataset, which may be specified using a value-based and/or a dimension-based subsetting predicate, and 2) no data reorganization is needed, once bitmap indices have been generated. We have extensively evaluated our method with different types of datasets and applications, and demonstrated the effectiveness of our approach.},
  note         = {LA-UR-pending},
  keywords     = {bitmap indices, database applications, database management, performance},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Ahrens, James; Sewell, Chris; Patchett, John
SDAV Visualization Area: VTK-m and In-Situ Highlights at Los Alamos Technical Report
2013, (LA-UR-13-27063).
Links | BibTeX | Tags: in situ, VTK-m
@techreport{info:lanl-repo/lareport/LA-UR-13-27063,
title = {SDAV Visualization Area: VTK-m and In-Situ Highlights at Los Alamos},
author = {James Ahrens and Chris Sewell and John Patchett},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-13-27063.pdf},
institution = {Los Alamos National Laboratory},
year = {2013},
date = {2013-01-01},
note = {LA-UR-13-27063},
keywords = {in situ, VTK-m},
pubstate = {published},
tppubtype = {techreport}
}
Bujack, Roxana; De Bie, Hendrik; De Schepper, Nele; Scheuermann, Gerik
Convolution Products for Hypercomplex Fourier Transforms Journal Article
In: Journal of Mathematical Imaging and Vision, pp. 1-19, 2013.
@article{BBSS13,
title = {Convolution Products for Hypercomplex Fourier Transforms},
author = {Roxana Bujack and Hendrik De Bie and Nele De Schepper and Gerik Scheuermann},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/ConvolutionProductsforHypercomplexFourierTransforms.pdf},
doi = {10.1007/s10851-013-0430-y},
year = {2013},
date = {2013-01-01},
journal = {Journal of Mathematical Imaging and Vision},
pages = {1--19},
publisher = {Springer US},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Scheuermann, Gerik; Hitzer, Eckhard
Demystification of the Geometric Fourier Transforms Journal Article
In: AIP Conference Proceedings, vol. 1558, 2013.
@article{BSH13b,
title = {Demystification of the Geometric Fourier Transforms},
author = {Roxana Bujack and Gerik Scheuermann and Eckhard Hitzer},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/DemystificationoftheGeometricFourierTransforms.pdf},
doi = {10.1063/1.4825543},
year = {2013},
date = {2013-01-01},
journal = {AIP Conference Proceedings},
volume = {1558},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Scheuermann, Gerik; Hitzer, Eckhard
Detection of Outer Rotations on 3D-Vector Fields with Iterative Geometric Correlation and its Efficiency Journal Article
In: Advances in Applied Clifford Algebras, pp. 1-19, 2013.
@article{BSH13c,
title = {Detection of Outer Rotations on 3D-Vector Fields with Iterative Geometric Correlation and its Efficiency},
author = {Roxana Bujack and Gerik Scheuermann and Eckhard Hitzer},
url = {https://datascience.dsscale.org/wp-content/uploads/2019/02/DetectionofOuterRotationson3D-VectorFieldswithIterativeGeometricCorrelationanditsEfficiency.pdf},
doi = {10.1007/s00006-013-0411-7},
year = {2013},
date = {2013-01-01},
journal = {Advances in Applied Clifford Algebras},
pages = {1--19},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hitzer, Eckhard; Bujack, Roxana; Scheuermann, Gerik
Vector field computations in Clifford's geometric algebra Journal Article
In: Third SICE Symposium on Computational Intelligence, 2013, Osaka University, 2013.
@article{HBS13,
  title     = {Vector field computations in Clifford's geometric algebra},
  author    = {Eckhard Hitzer and Roxana Bujack and Gerik Scheuermann},
  url       = {https://datascience.dsscale.org/wp-content/uploads/2019/02/VectorfieldcomputationsinCliffordsgeometricalgebra.pdf},
  year      = {2013},
  date      = {2013-01-01},
  journal   = {Third SICE Symposium on Computational Intelligence, 2013, Osaka University},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2012
Bent, John; Faibish, Sorin; Ahrens, James; Grider, Gary; Patchett, John; Tzelnic, Percy; Woodring, Jonathan
Jitter-free co-processing on a prototype exascale storage stack Proceedings Article
In: 2012 IEEE 28th Symposium on Mass Storage Systems and Technologies (MSST), pp. 1–5, IEEE 2012, ISSN: 2160-195X, (LA-UR-pending).
Links | BibTeX | Tags: co-processing, excascale, jitter-free
@inproceedings{Bent2012,
title = {Jitter-free co-processing on a prototype exascale storage stack},
author = {John Bent and Sorin Faibish and James Ahrens and Gary Grider and John Patchett and Percy Tzelnic and Jonathan Woodring},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/12/Jitter-FreeCo-ProcessingonaPrototypeExascaleStorageStack.pdf},
doi = {10.1109/MSST.2012.6232382},
issn = {2160-195X},
year = {2012},
date = {2012-04-01},
booktitle = {2012 IEEE 28th Symposium on Mass Storage Systems and Technologies (MSST)},
pages = {1--5},
organization = {IEEE},
note = {LA-UR-pending},
keywords = {co-processing, excascale, jitter-free},
pubstate = {published},
tppubtype = {inproceedings}
}
Williams, Sean; Petersen, Mark; Hecht, Matthew; Maltrud, Mathew; Hamann, Bernd; Patchett, John; Ahrens, James
Interface Exchange as an Indicator for Eddy Heat Transport Proceedings Article
In: Computer Graphics Forum, pp. 1125-1134, Wiley Online Library 2012, (LA-UR-12-21038).
Abstract | Links | BibTeX | Tags: applications, eddies, interface exchange, Ocean General Circulation Models, oceanography simulation and modeling, simulation output analysis
@inproceedings{williams2012interface,
title = {Interface Exchange as an Indicator for Eddy Heat Transport},
author = {Sean Williams and Mark Petersen and Matthew Hecht and Mathew Maltrud and Bernd Hamann and John Patchett and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/InterfaceExchangeAsAnIndicatorForEddyHeatTransport.pdf},
year = {2012},
date = {2012-01-01},
booktitle = {Computer Graphics Forum},
volume = {31},
number = {3pt3},
pages = {1125--1134},
organization = {Wiley Online Library},
abstract = {The ocean contains many large-scale, long-lived vortices, called mesoscale eddies, that are believed to have a role in the transport and redistribution of salt, heat, and nutrients throughout the ocean. Determining this role, however, has proven to be a challenge, since the mechanics of eddies are only partly understood; a standard definition for these ocean eddies does not exist and, therefore, scientifically meaningful, robust methods for eddy extraction, characterization, tracking and visualization remain a challenge. To shed light on the nature and potential roles of eddies, we extend our previous work on eddy identification and tracking to construct a new metric to characterize the transfer of water into and out of eddies across their boundary, and produce several visualizations of this new metric to provide clues about the role eddies play in the global ocean.},
note = {LA-UR-12-21038},
keywords = {applications, eddies, interface exchange, Ocean General Circulation Models, oceanography simulation and modeling, simulation output analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
Lo, Li-Ta; Sewell, Christopher; Ahrens, James
PISTON: A Portable Cross-Platform Framework for Data-Parallel Visualization Operators. Proceedings Article
In: EGPGV, pp. 11–20, 2012, (LA-UR-12-10227).
Abstract | Links | BibTeX | Tags: Concurrent Programming, parallel programming, PISTON
@inproceedings{lo2012piston,
title = {{PISTON}: A Portable Cross-Platform Framework for Data-Parallel Visualization Operators},
author = {Li-Ta Lo and Christopher Sewell and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/PISTONAPortableCrossPlatformFrameworkForData-ParallelVisualizationOperators.pdf},
year = {2012},
date = {2012-01-01},
booktitle = {EGPGV},
pages = {11--20},
abstract = {Due to the wide variety of current and next-generation supercomputing architectures, the development of high-performance parallel visualization and analysis operators frequently requires re-writing the underlying algorithms for many different platforms. In order to facilitate portability, we have devised a framework for creating such operators that employs the data-parallel programming model. By writing the operators using only data-parallel primitives (such as scans, transforms, stream compactions, etc.), the same code may be compiled to multiple targets using architecture-specific backend implementations of these primitives. Specifically, we make use of and extend NVIDIA's Thrust library, which provides CUDA and OpenMP backends. Using this framework, we have implemented isosurface, cut surface, and threshold operators, and have achieved good parallel performance on two different architectures (multi-core CPUs and NVIDIA GPUs) using the exact same operator code. We have applied these operators to several large, real scientific data sets, and have open-source released a beta version of our code base.},
note = {LA-UR-12-10227},
keywords = {Concurrent Programming, parallel programming, PISTON},
pubstate = {published},
tppubtype = {inproceedings}
}
Brownlee, Carson; Patchett, John; Lo, Li-Ta; DeMarle, David; Mitchell, Christopher; Ahrens, James; Hansen, Charles
A Study of Ray Tracing Large-Scale Scientific Data in Parallel Visualization Applications Proceedings Article
In: Eurographics Symposium on Parallel Graphics and Visualization, pp. 51–60, The Eurographics Association 2012, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: Distributed/network graphics, Graphics Systems, parallel, ray tracing, visualization
@inproceedings{brownlee2012study,
title = {A Study of Ray Tracing Large-Scale Scientific Data in Parallel Visualization Applications},
author = {Carson Brownlee and John Patchett and Li-Ta Lo and David DeMarle and Christopher Mitchell and James Ahrens and Charles Hansen},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/RayTracing.pdf},
year = {2012},
date = {2012-01-01},
booktitle = {Eurographics Symposium on Parallel Graphics and Visualization},
pages = {51--60},
organization = {The Eurographics Association},
abstract = {Large-scale analysis and visualization is becoming increasingly important as supercomputers and their simulations produce larger and larger data. These large data sizes are pushing the limits of traditional rendering algorithms and tools thus motivating a study exploring these limits and their possible resolutions through alternative rendering algorithms. In order to better understand real-world performance with large data, this paper presents a detailed timing study on a large cluster with the widely used visualization tools ParaView and VisIt. The software ray tracer Manta was integrated into these programs in order to show that improved performance could be attained with software ray tracing on a distributed memory, GPU enabled, parallel visualization resource. Using the Texas Advanced Computing Center’s Longhorn cluster which has multi-core CPUs and GPUs with large-scale polygonal data, we find multi-core CPU ray tracing to be significantly faster than both software rasterization and hardware-accelerated rasterization in existing scientific visualization tools with large data.},
note = {LA-UR-pending},
keywords = {Distributed/network graphics, Graphics Systems, parallel, ray tracing, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Brislawn, Christopher M.; Woodring, Jonathan; Mniszewski, Susan; DeMarle, David; Ahrens, James
Subband coding for large-scale scientific simulation data using JPEG 2000 Proceedings Article
In: Image Analysis and Interpretation (SSIAI), 2012 IEEE Southwest Symposium on, pp. 201–204, IEEE 2012, (LA-UR-12-1352).
Abstract | Links | BibTeX | Tags: JPEG 2000, scientific simulation data, subband coding
@inproceedings{brislawn2012subband,
title = {Subband coding for large-scale scientific simulation data using {JPEG} 2000},
author = {Christopher M. Brislawn and Jonathan Woodring and Susan Mniszewski and David DeMarle and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/SubbandCodingForLarge-ScaleScientificSimulationDataUsingJPEG2000.pdf},
year = {2012},
date = {2012-01-01},
booktitle = {Image Analysis and Interpretation (SSIAI), 2012 IEEE Southwest Symposium on},
pages = {201--204},
organization = {IEEE},
abstract = {The ISO/IEC JPEG 2000 image coding standard is a family of source coding algorithms targeting high-resolution image communications. JPEG 2000 features highly scalable embedded coding features that allow one to interactively zoom out to reduced resolution thumbnails of enormous data sets or to zoom in on highly localized regions of interest with very economical communications and rendering requirements. While intended for fixed-precision input data, the implementation of the irreversible version of the standard is often done internally in floating point arithmetic. Moreover, the standard is designed to support high-bit-depth data. Part 2 of the standard also provides support for three-dimensional data sets such as multicomponent or volumetric imagery. These features make JPEG 2000 an appealing candidate for highly scalable communications coding and visualization of two- and three-dimensional data produced by scientific simulation software. We present results of initial experiments applying JPEG 2000 to scientific simulation data produced by the Parallel Ocean Program (POP) global ocean circulation model, highlighting both the promise and the many challenges this approach holds for scientific visualization applications.},
note = {LA-UR-12-1352},
keywords = {JPEG 2000, scientific simulation data, subband coding},
pubstate = {published},
tppubtype = {inproceedings}
}
Sewell, Christopher; Meredith, Jeremy; Moreland, Kenneth; Peterka, Tom; DeMarle, David; Lo, Li-ta; Ahrens, James; Maynard, Robert; Geveci, Berk
The SDAV software frameworks for visualization and analysis on next-generation multi-core and many-core architectures Proceedings Article
In: High Performance Computing, Networking, Storage and Analysis (SCC), 2012 SC Companion:, pp. 206–214, IEEE 2012, (LA-UR-12-26928).
Abstract | Links | BibTeX | Tags: data-parallel, in-situ, many-core architectures, multi-core architectures, visualization, VTK-m
@inproceedings{sewell2012sdav,
title = {The {SDAV} software frameworks for visualization and analysis on next-generation multi-core and many-core architectures},
author = {Christopher Sewell and Jeremy Meredith and Kenneth Moreland and Tom Peterka and David DeMarle and Li-Ta Lo and James Ahrens and Robert Maynard and Berk Geveci},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/TheSDAVSoftwareFrameworksForVisualizationAndAnalysisOnNext-GenerationMulti-CoreAndMany-CoreArchitectures.pdf},
year = {2012},
date = {2012-01-01},
booktitle = {High Performance Computing, Networking, Storage and Analysis (SCC), 2012 SC Companion:},
pages = {206--214},
organization = {IEEE},
abstract = {This paper surveys the four software frameworks being developed as part of the visualization pillar of the SDAV (Scalable Data Management, Analysis, and Visualization) Institute, one of the SciDAC (Scientific Discovery through Advanced Computing) Institutes established by the ASCR (Advanced Scientific Computing Research) Program of the U.S. Department of Energy. These frameworks include EAVL (Extreme-scale Analysis and Visualization Library), Dax (Data Analysis at Extreme), DIY (Do It Yourself), and PISTON. The objective of these frameworks is to facilitate the adaptation of visualization and analysis algorithms to take advantage of the available parallelism in emerging multi-core and manycore hardware architectures, in anticipation of the need for such algorithms to be run in-situ with LCF (leadership-class facilities) simulation codes on supercomputers.},
note = {LA-UR-12-26928},
keywords = {data-parallel, in-situ, many-core architectures, multi-core architectures, visualization, VTK-m},
pubstate = {published},
tppubtype = {inproceedings}
}
2011
Williams, Sean; Petersen, Mark; Bremer, Peer-Timo; Hecht, Matthew; Pascucci, Valerio; Ahrens, James; Hlawitschka, Mario; Hamann, Bernd
Adaptive extraction and quantification of geophysical vortices Journal Article
In: Visualization and Computer Graphics, IEEE Transactions on, vol. 17, no. 12, pp. 2088–2095, 2011, (LA-UR-11-04444).
Abstract | Links | BibTeX | Tags: adaptive extraction, feature extraction, Geophysical Vortices, Quantification, statistical data analysis, Vortex extraction
@article{williams2011adaptive,
title = {Adaptive extraction and quantification of geophysical vortices},
author = {Sean Williams and Mark Petersen and Peer-Timo Bremer and Matthew Hecht and Valerio Pascucci and James Ahrens and Mario Hlawitschka and Bernd Hamann},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AdaptiveExtractionAndQuantificaitonOfGeophysicalVortices.pdf},
year = {2011},
date = {2011-01-01},
journal = {Visualization and Computer Graphics, IEEE Transactions on},
volume = {17},
number = {12},
pages = {2088--2095},
publisher = {IEEE},
abstract = {We consider the problem of extracting discrete two-dimensional vortices from a turbulent flow. In our approach we use a reference model describing the expected physics and geometry of an idealized vortex. The model allows us to derive a novel correlation between the size of the vortex and its strength, measured as the square of its strain minus the square of its vorticity. For vortex detection in real models we use the strength parameter to locate potential vortex cores, then measure the similarity of our ideal analytical vortex and the real vortex core for different strength thresholds. This approach provides a metric for how well a vortex core is modeled by an ideal vortex. Moreover, this provides insight into the problem of choosing the thresholds that identify a vortex. By selecting a target coefficient of determination (i.e., statistical confidence), we determine on a per-vortex basis what threshold of the strength parameter would be required to extract that vortex at the chosen confidence. We validate our approach on real data from a global ocean simulation and derive from it a map of expected vortex strengths over the global ocean.},
note = {LA-UR-11-04444},
keywords = {adaptive extraction, feature extraction, Geophysical Vortices, Quantification, statistical data analysis, Vortex extraction},
pubstate = {published},
tppubtype = {article}
}
Woodring, Jonathan; Heitmann, Katrin; Ahrens, James; Fasel, Patricia; Hsu, Chung-Hsing; Habib, Salman; Pope, Adrian
Analyzing and visualizing cosmological simulations with ParaView Journal Article
In: The Astrophysical Journal Supplement Series, vol. 195, no. 1, pp. 11, 2011, (LA-UR-10-06301).
Abstract | Links | BibTeX | Tags: cosmology, large-scale structure of universe, numerical methods, ParaView
@article{woodring2011analyzing,
title = {Analyzing and visualizing cosmological simulations with {ParaView}},
author = {Jonathan Woodring and Katrin Heitmann and James Ahrens and Patricia Fasel and Chung-Hsing Hsu and Salman Habib and Adrian Pope},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AnalyzingAndVisualizingCosmologicalSimulationsWithParaView.pdf},
year = {2011},
date = {2011-01-01},
journal = {The Astrophysical Journal Supplement Series},
volume = {195},
number = {1},
pages = {11},
publisher = {IOP Publishing},
abstract = {The advent of large cosmological sky surveys – ushering in the era of precision cosmology – has been accompanied by ever larger cosmological simulations. The analysis of these simulations, which currently encompass tens of billions of particles and up to trillion particles in the near future, is often as daunting as carrying out the simulations in the first place. Therefore, the development of very efficient analysis tools combining qualitative and quantitative capabilities is a matter of some urgency. In this paper we introduce new analysis features implemented within ParaView, a parallel, open-source visualization toolkit, to analyze large N-body simulations. The new features include particle readers and a very efficient halo finder which identifies friends-of-friends halos and determines common halo properties. In combination with many other functionalities already existing within ParaView, such as histogram routines or interfaces to Python, this enhanced version enables fast, interactive, and convenient analyses of large cosmological simulations. In addition, development paths are available for future extensions.},
note = {LA-UR-10-06301},
keywords = {cosmology, large-scale structure of universe, numerical methods, ParaView},
pubstate = {published},
tppubtype = {article}
}
Woodring, Jonathan; Ahrens, James; Figg, Jeannette; Wendelberger, Joanne; Habib, Salman; Heitmann, Katrin
In-situ Sampling of a Large-Scale Particle Simulation for Interactive Visualization and Analysis Proceedings Article
In: Computer Graphics Forum, pp. 1151–1160, Wiley Online Library 2011, (LA-UR-11-02106).
Abstract | Links | BibTeX | Tags: in-situ, large-scale particle simulation, sampling, visualization
@inproceedings{woodring2011situ,
title = {In-situ Sampling of a Large-Scale Particle Simulation for Interactive Visualization and Analysis},
author = {Jonathan Woodring and James Ahrens and Jeannette Figg and Joanne Wendelberger and Salman Habib and Katrin Heitmann},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/In-situSamplingOfALarge-ScaleParticleSimulationForInteractiveVisualizationAndAnalysis.pdf},
year = {2011},
date = {2011-01-01},
booktitle = {Computer Graphics Forum},
volume = {30},
number = {3},
pages = {1151--1160},
organization = {Wiley Online Library},
abstract = {We describe a simulation-time random sampling of a large-scale particle simulation, the RoadRunner Universe MC 3 cosmological simulation, for interactive post-analysis and visualization. Simulation data generation rates will continue to be far greater than storage bandwidth rates by many orders of magnitude. This implies that only a very small fraction of data generated by a simulation can ever be stored and subsequently post-analyzed. The limiting factors in this situation are similar to the problem in many population surveys: there aren’t enough human resources to query a large population. To cope with the lack of resources, statistical sampling techniques are used to create a representative data set of a large population. Following this analogy, we propose to store a simulation-time random sampling of the particle data for post-analysis, with level-of-detail organization, to cope with the bottlenecks. A sample is stored directly from the simulation in a level-of-detail format for post-visualization and analysis, which amortizes the cost of post-processing and reduces workflow time. Additionally by sampling during the simulation, we are able to analyze the entire particle population to record full population statistics and quantify sample error.},
note = {LA-UR-11-02106},
keywords = {in-situ, large-scale particle simulation, sampling, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Nouanesengsy, Boonthanome; Ahrens, James; Woodring, Jonathan; Shen, Han-Wei
Revisiting parallel rendering for shared memory machines Proceedings Article
In: Proceedings of the 11th Eurographics conference on Parallel Graphics and Visualization, pp. 31–40, Eurographics Association 2011, (LA-UR-11-02086).
Abstract | Links | BibTeX | Tags: hardware architecture, parallel processing, parallel rendering
@inproceedings{nouanesengsy2011revisiting,
  author       = {Boonthanome Nouanesengsy and James Ahrens and Jonathan Woodring and Han-Wei Shen},
  title        = {Revisiting parallel rendering for shared memory machines},
  booktitle    = {Proceedings of the 11th Eurographics conference on Parallel Graphics and Visualization},
  pages        = {31--40},
  organization = {Eurographics Association},
  year         = {2011},
  date         = {2011-01-01},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/RevisitingParallelRenderingForSharedMemoryMachines.pdf},
  abstract     = {Increasing the core count of CPUs to increase computational performance has been a significant trend for the better part of a decade. This has led to an unprecedented availability of large shared memory machines. Programming paradigms and systems are shifting to take advantage of this architectural change, so that intra-node parallelism can be fully utilized. Algorithms designed for parallel execution on distributed systems will also need to be modified to scale in these new shared and hybrid memory systems. In this paper, we reinvestigate parallel rendering algorithms with the goal of finding one that achieves favorable performance in this new environment. We test and analyze various methods, including sort-first, sort-last, and a hybrid scheme, to find an optimal parallel algorithm that maximizes shared memory performance.},
  note         = {LA-UR-11-02086},
  keywords     = {hardware architecture, parallel processing, parallel rendering},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Woodring, Jonathan; Mniszewski, Susan; Brislawn, Christopher M.; DeMarle, David; Ahrens, James
Revisiting wavelet compression for large-scale climate data using JPEG 2000 and ensuring data precision Proceedings Article
In: Large Data Analysis and Visualization (LDAV), 2011 IEEE Symposium on, pp. 31–38, IEEE 2011, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: climate modeling, coding and information theory, data compaction and compression, JPEG 2000, Wavelet
@inproceedings{woodring2011revisiting,
title = {Revisiting wavelet compression for large-scale climate data using {JPEG} 2000 and ensuring data precision},
author = {Jonathan Woodring and Susan Mniszewski and Christopher M. Brislawn and David DeMarle and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/RevisitingWaveletComp.pdf},
year = {2011},
date = {2011-01-01},
booktitle = {Large Data Analysis and Visualization (LDAV), 2011 IEEE Symposium on},
pages = {31--38},
organization = {IEEE},
abstract = {We revisit wavelet compression by using a standards-based method to reduce large-scale data sizes for production scientific computing. Many of the bottlenecks in visualization and analysis come from limited bandwidth in data movement, from storage to networks. The majority of the processing time for visualization and analysis is spent reading or writing large-scale data or moving data from a remote site in a distance scenario. Using wavelet compression in JPEG 2000, we provide a mechanism to vary data transfer time versus data quality, so that a domain expert can improve data transfer time while quantifying compression effects on their data. By using a standards-based method, we are able to provide scientists with the state-of-the-art wavelet compression from the signal processing and data compression community, suitable for use in a production computing environment. To quantify compression effects, we focus on measuring bit rate versus maximum error as a quality metric to provide precision guarantees for scientific analysis on remotely compressed POP (Parallel Ocean Program) data.},
note = {LA-UR-pending},
keywords = {climate modeling, coding and information theory, data compaction and compression, JPEG 2000, Wavelet},
pubstate = {published},
tppubtype = {inproceedings}
}
Mitchell, Christopher; Ahrens, James; Wang, Jun
VisIO: Enabling interactive visualization of ultra-scale, time series data via high-bandwidth distributed i/o systems Proceedings Article
In: Parallel & Distributed Processing Symposium (IPDPS), 2011 IEEE International, pp. 68–79, IEEE 2011, (LA-UR-10-07014).
Abstract | Links | BibTeX | Tags: distributed i/o, Parallel Computing, scientific visualization, ultra-scale, visualization
@inproceedings{mitchell2011visio,
title = {{VisIO}: Enabling interactive visualization of ultra-scale, time series data via high-bandwidth distributed {I/O} systems},
author = {Christopher Mitchell and James Ahrens and Jun Wang},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisIO-IPDPS11.pdf},
year = {2011},
date = {2011-01-01},
booktitle = {Parallel & Distributed Processing Symposium (IPDPS), 2011 IEEE International},
pages = {68--79},
organization = {IEEE},
abstract = {Petascale simulations compute at resolutions ranging into billions of cells and write terabytes of data for visualization and analysis. Interactive visualization of this time series is a desired step before starting a new run. The I/O subsystem and associated network often are a significant impediment to interactive visualization of time-varying data; as they are not configured or provisioned to provide necessary I/O read rates. In this paper, we propose a new I/O library for visualization applications: VisIO. Visualization applications commonly use N-to-N reads within their parallel enabled readers which provides an incentive for a shared-nothing approach to I/O, similar to other data-intensive approaches such as Hadoop. However, unlike other data-intensive applications, visualization requires: (1) interactive performance for large data volumes, (2) compatibility with MPI and POSIX file system semantics for compatibility with existing infrastructure, and (3) use of existing file formats and their stipulated data partitioning rules. VisIO, provides a mechanism for using a non-POSIX distributed file system to provide linear scaling of I/O bandwidth. In addition, we introduce a novel scheduling algorithm that helps to co-locate visualization processes on nodes with the requested data. Testing using VisIO integrated into ParaView was conducted using the Hadoop Distributed File System (HDFS) on TACC’s Longhorn cluster. A representative dataset, VPIC, across 128 nodes showed a 64.4% read performance improvement compared to the provided Lustre installation. Also tested, was a dataset representing a global ocean salinity simulation that showed a 51.4% improvement in read performance over Lustre when using our VisIO system. VisIO, provides powerful high-performance I/O services to visualization applications, allowing for interactive performance with ultra-scale, time-series data.},
note = {LA-UR-10-07014},
keywords = {distributed i/o, Parallel Computing, scientific visualization, ultra-scale, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Williams, Sean; Hecht, Matthew; Petersen, Mark; Strelitz, Richard; Maltrud, Mathew; Ahrens, James; Hlawitschka, Mario; Hamann, Bernd
Visualization and analysis of eddies in a global ocean simulation Proceedings Article
In: Computer Graphics Forum, pp. 991–1000, Wiley Online Library 2011, (LA-UR-11-02104).
Abstract | Links | BibTeX | Tags: Ocean General Circulation Models, oceanography simulation and modeling, simulation output analysis
@inproceedings{williams2011visualization,
title = {Visualization and analysis of eddies in a global ocean simulation},
author = {Sean Williams and Matthew Hecht and Mark Petersen and Richard Strelitz and Mathew Maltrud and James Ahrens and Mario Hlawitschka and Bernd Hamann},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisualizationAndAnalysisOfEddiesInAGlobalOceanSimulation.pdf},
year = {2011},
date = {2011-01-01},
booktitle = {Computer Graphics Forum},
volume = {30},
number = {3},
pages = {991--1000},
organization = {Wiley Online Library},
abstract = {We present analysis and visualization of flow data from a high-resolution simulation of the dynamical behavior of the global ocean. Of particular scientific interest are coherent vortical features called mesoscale eddies. We first extract high-vorticity features using a metric from the oceanography community called the Okubo-Weiss parameter. We then use a new circularity criterion to differentiate eddies from other non-eddy features like meanders in strong background currents. From these data, we generate visualizations showing the three-dimensional structure and distribution of ocean eddies. Additionally, the characteristics of each eddy are recorded to form an eddy census that can be used to investigate correlations among variables such as eddy thickness, depth, and location. From these analyses, we gain insight into the role eddies play in large-scale ocean circulation.},
note = {LA-UR-11-02104},
keywords = {Ocean General Circulation Models, oceanography simulation and modeling, simulation output analysis},
pubstate = {published},
tppubtype = {inproceedings}
}
Ahrens, James; Rogers, David; Springmeyer, Becky; Brugger, Eric; Crossno, Patricia; Jiang, Ming; Harrison, Cyrus; Monroe, Laura; Tomlinson, Bob; Pavlakos, Dino; Childs, Hank; Klasky, Scott; Ma, Kwan-Liu
Visualization and data analysis at the exascale Technical Report
Los Alamos National Laboratory (LANL) 2011, (LLNL-TR-474731).
Abstract | Links | BibTeX | Tags: exascale, visualization and data analysis
@techreport{ahrens2011visualization,
title = {Visualization and data analysis at the exascale},
author = {James Ahrens and David Rogers and Becky Springmeyer and Eric Brugger and Patricia Crossno and Ming Jiang and Cyrus Harrison and Laura Monroe and Bob Tomlinson and Dino Pavlakos and Hank Childs and Scott Klasky and Kwan-Liu Ma},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisualizationAndDataAnalysisAtTheExascale.pdf},
year = {2011},
date = {2011-01-01},
institution = {Los Alamos National Laboratory (LANL)},
abstract = {The scope of our working group is scientific visualization and data analysis. Scientific visualization refers to the process of transforming scientific simulation and experimental data into images to facilitate visual understanding. Data analysis refers to the process of transforming data into an information-rich form via mathematical or computational algorithms to promote better understanding. We share scope on data management with the Storage group. Data management refers to the process of tracking, organizing and enhancing the use of scientific data. The purpose of our work is to enable scientific discovery and understanding. Visualization and data analysis has a broad scope as an integral part of scientific simulations and experiments; it is also a distinct separate service for scientific discovery, presentation and documentation purposes. Our scope includes an exascale software and hardware infrastructure that effectively supports visualization and data analysis.},
note = {LLNL-TR-474731},
keywords = {exascale, visualization and data analysis},
pubstate = {published},
tppubtype = {techreport}
}
Ahrens, James; Patchett, John; Lo, Li-Ta; Mitchell, Christopher; DeMarle, David; Brownlee, Carson
A Report Documenting the Completion of the Los Alamos National Laboratory Portion of the ASC Level II Milestone Technical Report
2011, (LA-UR-11-00494).
@techreport{info:lanl-repo/lareport/LA-UR-11-00494,
title = {A Report Documenting the Completion of the Los Alamos National Laboratory Portion of the ASC Level II Milestone},
author = {James Ahrens and John Patchett and Li-Ta Lo and Christopher Mitchell and David DeMarle and Carson Brownlee},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-11-00494.pdf},
year = {2011},
date = {2011-01-01},
note = {LA-UR-11-00494},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
2010
Patchett, John; Ahrens, James; Lo, Li-Ta; Brownlee, Carson; Mitchell, Christopher; Hansen, Chuck
Approaching the Exa-Scale: a Real-World Evaluation of Rendering Extremely Large Data Sets Journal Article
In: 2010.
Abstract | Links | BibTeX | Tags: exascale, large-scale data
@article{Patchett_Ahrens_Lo_Browniee_Mitchell_Hansen_2010,
title = {Approaching the Exa-Scale: a Real-World Evaluation of Rendering Extremely Large Data Sets},
author = {John Patchett and James Ahrens and Li-Ta Lo and Carson Brownlee and Christopher Mitchell and Chuck Hansen},
url = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-10-07016.pdf},
year = {2010},
date = {2010-10-01},
abstract = {Extremely large scale analysis is becoming increasingly important as supercomputers and their simulations move from petascale to exascale. The lack of dedicated hardware acceleration for rendering on today’s supercomputing platforms motivates our detailed evaluation of the possibility of interactive rendering on the supercomputer. In order to facilitate our understanding of rendering on the supercomputing platform, we focus on scalability of rendering algorithms and architecture envisioned for exascale datasets. To understand tradeoffs for dealing with extremely large datasets, we compare three different rendering algorithms for large polygonal data: software based ray tracing, software based rasterization and hardware accelerated rasterization. We present a case study of strong and weak scaling of rendering extremely large data on both GPU and CPU based parallel supercomputers using ParaView, a parallel visualization tool. We use three different data sets: two synthetic and one from a scientific application. At an extreme scale, algorithmic rendering choices make a difference and should be considered while approaching exascale computing, visualization, and analysis. We find software based ray-tracing offers a viable approach for scalable rendering of the projected future massive data sizes.},
keywords = {exascale, large-scale data},
pubstate = {published},
tppubtype = {article}
}
Ahrens, James; Fout, Nathaniel; Ma, Kwan-Liu
Time varying, multivariate volume data reduction Proceedings Article
In: 2010, (LA-UR-10-02243).
Abstract | Links | BibTeX | Tags: data reduction, multivariate, time-varying
@inproceedings{Ahrens2010,
  author      = {James Ahrens and Nathaniel Fout and Kwan-Liu Ma},
  title       = {Time varying, multivariate volume data reduction},
  institution = {Los Alamos National Laboratory (LANL)},
  year        = {2010},
  date        = {2010-01-01},
  url         = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Time-VaryingMltivariateVolumeDataReduction.pdf},
  abstract    = {Large-scale supercomputing is revolutionizing the way science is conducted. A growing challenge, however, is understanding the massive quantities of data produced by large-scale simulations. The data, typically time-varying, multi-variate, and volumetric, can occupy from hundreds of giga-bytes to several terabytes of storage space. Transferring and processing volume data of such sizes is prohibitively expensive and resource intensive. Although it may not be possible to entirely alleviate these problems, data compression should be considered as part of a viable solution, especially when the primary means of data analysis is volume rendering. In this paper we present our study of multivariate compression, which exploits correlations among related variables, for volume rendering. Two configurations for multidimensional compression based on vector quantization are examined. We emphasize quality reconstruction and interactive rendering, which leads us to a solution using graphics hardware to perform on-the-fly decompression during rendering.},
  note        = {LA-UR-10-02243},
  keywords    = {data reduction, multivariate, time-varying},
  pubstate    = {published},
  tppubtype   = {inproceedings}
}
Ahrens, James; Heitmann, Katrin; Petersen, Mark; Woodring, Jonathan; Williams, Sean; Fasel, Patricia; Ahrens, Christine; Hsu, Chung-Hsing; Geveci, Berk
Verification of Scientific Simulations via Hypothesis-Driven Comparative and Quantitative Visualization Journal Article
In: IEEE Computer Graphics and Applications Magazine, vol. 30, no. 6, 2010, (LA-UR-10-07017).
Abstract | Links | BibTeX | Tags: hypothesis testing, Space, Visual Evidence; Feature Detection and Tracking, Visualization in Earth
@article{ahrens2010verification,
title = {Verification of Scientific Simulations via Hypothesis-Driven Comparative and Quantitative Visualization},
author = {James Ahrens and Katrin Heitmann and Mark Petersen and Jonathan Woodring and Sean Williams and Patricia Fasel and Christine Ahrens and Chung-Hsing Hsu and Berk Geveci},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VerificationOfTheScientificSimulationsViaHypothesis-DrivenComparativeAndQuantitativeVisualization.pdf},
year = {2010},
date = {2010-01-01},
journal = {IEEE Computer Graphics and Applications Magazine},
volume = {30},
number = {6},
publisher = {Oak Ridge National Laboratory (ORNL)},
abstract = {We describe a visualization-assisted process for the verification of scientific simulation codes. The need for code verification stems from the requirement for very accurate predictions to interpret data confidently. We compare different cosmological and oceanographic simulations to reliably predict differences in simulation results. Our verification consists of the integration of an iterative hypothesis-verification process with comparative, feature, and quantitative visualization. We validate this process by verifying the results of different cosmology and oceanographic simulation codes.},
note = {LA-UR-10-07017},
keywords = {hypothesis testing, Space, Visual Evidence; Feature Detection and Tracking, Visualization in Earth},
pubstate = {published},
tppubtype = {article}
}
Hsu, Chung-Hsing; Ahrens, James; Heitmann, Katrin
Verification of the time evolution of cosmological simulations via hypothesis-driven comparative and quantitative visualization Proceedings Article
In: Pacific Visualization Symposium (PacificVis), 2010 IEEE, pp. 81–88, IEEE 2010, (LA-UR-09-08278).
Abstract | Links | BibTeX | Tags: environmental sciences, Space, Visual Evidence; Feature Detection and Tracking, Visualization in Earth
@inproceedings{hsu2010verification,
title = {Verification of the time evolution of cosmological simulations via hypothesis-driven comparative and quantitative visualization},
author = {Chung-Hsing Hsu and James Ahrens and Katrin Heitmann},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VerificationOfTheTimeEvolutionOfCosmologicalSimulationsViaHyphothesis-DrivenComparativeAndQuantitativeVisualization.pdf},
year = {2010},
date = {2010-01-01},
booktitle = {Pacific Visualization Symposium (PacificVis), 2010 IEEE},
pages = {81--88},
organization = {IEEE},
abstract = {We describe a visualization-assisted process for the verification of cosmological simulation codes. The need for code verification stems from the requirement for very accurate predictions in order to interpret observational data confidently. We compare different simulation algorithms in order to reliably predict differences in simulation results and understand their dependence on input parameter settings. Our verification process consists of the integration of iterative hypothesis-verification with comparative, feature and quantitative visualization. We validate this process by verifying the time evolution results of three different cosmology simulation codes. The purpose of this verification is to study the accuracy of AMR methods versus other N-body simulation methods for cosmological simulations.},
note = {LA-UR-09-08278},
keywords = {environmental sciences, Space, Visual Evidence; Feature Detection and Tracking, Visualization in Earth},
pubstate = {published},
tppubtype = {inproceedings}
}
Ahrens, James; Patchett, John; Lo, Li-Ta; DeMarle, David; Brownlee, Carson; Mitchell, Christopher
A Report Documenting the Completion of the Los Alamos National Laboratory Portion of the ASC Level II Milestone, Visualization on the Supercomputing Platform, ASC Level II Milestone Meeting Technical Report
2010, (LA-UR-11-00494).
Abstract | Links | BibTeX | Tags: L2
@techreport{Ahrens2010,
title = {A Report Documenting the Completion of the Los Alamos National Laboratory Portion of the ASC Level II Milestone, Visualization on the Supercomputing Platform, ASC Level II Milestone Meeting},
author = {James Ahrens and John Patchett and Li-Ta Lo and David DeMarle and Carson Brownlee and Christopher Mitchell},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisualizationOnTheSupercomputingPlatform.pdf},
year = {2010},
date = {2010-01-01},
abstract = {This report provides documentation for the completion of the Los Alamos portion of the ASC Level II 'Visualization on the Supercomputing Platform' milestone.},
note = {LA-UR-11-00494},
keywords = {L2},
pubstate = {published},
tppubtype = {techreport}
}
Ahrens, James; Heitmann, Katrin; Petersen, Mark; Woodring, Jonathan; Williams, Sean; Fasel, Patricia; Ahrens, Christine; Hsu, Chung-Hsing; Geveci, Berk
Verifying Scientific Simulations via Comparative and Quantitative Visualization Journal Article
In: IEEE Computer Graphics and Applications, vol. 30, no. 6, pp. 16–28, 2010, ISSN: 0272-1716.
Abstract | Links | BibTeX | Tags: meteorology, visualization
@article{LAPR-2010-010807,
title = {Verifying Scientific Simulations via Comparative and Quantitative Visualization},
author = {James Ahrens and Katrin Heitmann and Mark Petersen and Jonathan Woodring and Sean Williams and Patricia Fasel and Christine Ahrens and Chung-Hsing Hsu and Berk Geveci},
url = {http://ieeexplore.ieee.org/document/5560617/},
doi = {10.1109/MCG.2010.100},
issn = {0272-1716},
year = {2010},
date = {2010-01-01},
journal = {IEEE Computer Graphics and Applications},
volume = {30},
number = {6},
pages = {16--28},
abstract = {This article presents a visualization-assisted process that verifies scientific-simulation codes. Code verification is necessary because scientists require accurate predictions to interpret data confidently. This verification process integrates iterative hypothesis verification with comparative, feature, and quantitative visualization. Following this process can help identify differences in cosmological and oceanographic simulations.},
keywords = {meteorology, visualization},
pubstate = {published},
tppubtype = {article}
}
2009
Habib, Salman; Pope, Adrian; Lukic, Zarija; Daniel, David; Fasel, Patricia; Desai, Nehal; Heitmann, Katrin; Hsu, Chung-Hsing; Ankeny, Lee; Mark, Graham
Hybrid petacomputing meets cosmology: The Roadrunner Universe project Proceedings Article
In: Journal of Physics: Conference Series, pp. 012019, IOP Publishing 2009, (LA-UR-09-03785).
Abstract | Links | BibTeX | Tags: cosmology, petacomputing
@inproceedings{habib2009hybrid,
title = {Hybrid petacomputing meets cosmology: The Roadrunner Universe project},
author = {Salman Habib and Adrian Pope and Zarija Lukic and David Daniel and Patricia Fasel and Nehal Desai and Katrin Heitmann and Chung-Hsing Hsu and Lee Ankeny and Graham Mark},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/HybridPetacomputingMeetsCosmologyTheRoadrunnerUniverseProject.pdf},
year = {2009},
date = {2009-01-01},
booktitle = {Journal of Physics: Conference Series},
volume = {180},
number = {1},
pages = {012019},
organization = {IOP Publishing},
abstract = {Over the last two decades, critical observational advances in large-volume sky surveys carried out over a wide range of wavelengths, as well as over short time cadences, have revolutionized cosmology. Computational cosmology has emerged as an essential resource for providing detailed predictions for these observations, essential data for assisting in their design, and sophisticated tools for interpreting the final results.},
note = {LA-UR-09-03785},
keywords = {cosmology, petacomputing},
pubstate = {published},
tppubtype = {inproceedings}
}
Ahrens, James; Woodring, Jonathan; DeMarle, David; Patchett, John; Maltrud, Mathew
Interactive remote large-scale data visualization via prioritized multi-resolution streaming Proceedings Article
In: Proceedings of the 2009 Workshop on Ultrascale Visualization, pp. 1–10, ACM 2009, (LA-UR-10-02112).
Abstract | Links | BibTeX | Tags: remote systems
@inproceedings{ahrens2009interactive,
title = {Interactive remote large-scale data visualization via prioritized multi-resolution streaming},
author = {James Ahrens and Jonathan Woodring and David DeMarle and John Patchett and Mathew Maltrud},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/InteractiveRemoteLarge-ScaleDataVisualizationViaPrioritizedMulti-resolutionStreaming2.pdf},
year = {2009},
date = {2009-01-01},
booktitle = {Proceedings of the 2009 Workshop on Ultrascale Visualization},
pages = {1--10},
organization = {ACM},
abstract = {The simulations that run on petascale and future exascale supercomputers pose a difficult challenge for scientists to visualize and analyze their results remotely. They are limited in their ability to interactively visualize their data mainly due to limited network bandwidth associated with sending and reading large data at a distance. To tackle this issue, we provide a generalized distance visualization architecture for large remote data that aims to provide interactive analysis. We achieve this through a prioritized, multi-resolution, streaming architecture. Since the original data size is several orders of magnitude greater than the display and network technologies, we stream downsampled versions of representation data over time to complete a visualization using fast local rendering. This technique provides the necessary interactivity and full-resolution results dynamically on demand while maintaining a full-featured visualization framework.},
note = {LA-UR-10-02112},
keywords = {remote systems},
pubstate = {published},
tppubtype = {inproceedings}
}
Woodring, Jonathan; Shen, Han-Wei
Multiscale time activity data exploration via temporal clustering visualization spreadsheet Journal Article
In: Visualization and Computer Graphics, IEEE Transactions on, vol. 15, no. 1, pp. 123–137, 2009.
Abstract | Links | BibTeX | Tags: animation, clustering, filter banks, K-means, time histogram, time-varying, transfer function, visualization spreadsheet, Wavelet
@article{woodring2009multiscale,
title = {Multiscale time activity data exploration via temporal clustering visualization spreadsheet},
author = {Jonathan Woodring and Han-Wei Shen},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/MultiscaleTimeActivityDataExplorationViaTemporalClusteringVisualizationSpreadsheet.pdf},
year = {2009},
date = {2009-01-01},
journal = {Visualization and Computer Graphics, IEEE Transactions on},
volume = {15},
number = {1},
pages = {123--137},
publisher = {IEEE},
abstract = {Time-varying data is usually explored by animation or arrays of static images. Neither is particularly effective for classifying data by different temporal activities. Important temporal trends can be missed due to the lack of ability to find them with current visualization methods. In this paper, we propose a method to explore data at different temporal resolutions to discover and highlight data based upon time-varying trends. Using the wavelet transform along the time axis, we transform data points into multiscale time series curve sets. The time curves are clustered so that data of similar activity are grouped together at different temporal resolutions. The data are displayed to the user in a global time view spreadsheet, where she is able to select temporal clusters of data points and filter and brush data across temporal scales. With our method, a user can interact with data based on time activities and create expressive visualizations.},
keywords = {animation, clustering, filter banks, K-means, time histogram, time-varying, transfer function, visualization spreadsheet, Wavelet},
pubstate = {published},
tppubtype = {article}
}
Patchett, John; Ahrens, James; Ahern, Sean; Pugmire, David
Parallel visualization and analysis with ParaView on a Cray Xt4 Journal Article
In: Cray User Group, 2009, (LA-UR-10-02238).
Abstract | Links | BibTeX | Tags: Parallel Visualization, ParaView
@article{patchett2009parallel,
title = {Parallel visualization and analysis with ParaView on a Cray Xt4},
author = {John Patchett and James Ahrens and Sean Ahern and David Pugmire},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ParallelVisualizationAndAnalysisWithParaViewOnACrayXT4.pdf},
year = {2009},
date = {2009-01-01},
journal = {Cray User Group},
abstract = {Scientific data sets produced by modern supercomputers like ORNL’s Cray XT 4, Jaguar, can be extremely large, making visualization and analysis more difficult as moving large resultant data to dedicated analysis systems can be prohibitively expensive. We share our continuing work of integrating a parallel visualization system, ParaView, on ORNL’s Jaguar system and our efforts to enable extreme scale interactive data visualization and analysis. We will discuss porting challenges and present performance numbers.},
note = {LA-UR-10-02238},
keywords = {Parallel Visualization, ParaView},
pubstate = {published},
tppubtype = {article}
}
Woodring, Jonathan; Shen, Han-Wei
Semi-Automatic Time-Series Transfer Functions via Temporal Clustering and Sequencing Proceedings Article
In: Computer Graphics Forum, pp. 791–798, Wiley Online Library 2009.
Abstract | Links | BibTeX | Tags: temporal clustering and Sequencing, time-series transfer functions
@inproceedings{woodring2009semi,
title = {Semi-Automatic Time-Series Transfer Functions via Temporal Clustering and Sequencing},
author = {Jonathan Woodring and Han-Wei Shen},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Semi-AutomaticTime-SeriesTransferFunctionsViaTemporalClusteringAndSequencing.pdf},
year = {2009},
date = {2009-01-01},
booktitle = {Computer Graphics Forum},
volume = {28},
number = {3},
pages = {791--798},
organization = {Wiley Online Library},
abstract = {When creating transfer functions for time-varying data, it is not clear what range of values to use for classification, as data value ranges and distributions change over time. In order to generate time-varying transfer functions, we search the data for classes that have similar behavior over time, assuming that data points that behave similarly belong to the same feature. We utilize a method we call temporal clustering and sequencing to find dynamic features in value space and create a corresponding transfer function. First, clustering finds groups of data points that have the same value space activity over time. Then, sequencing derives a progression of clusters over time, creating chains that follow value distribution changes. Finally, the cluster sequences are used to create transfer functions, as sequences describe the value range distributions over time in a data set.},
keywords = {temporal clustering and Sequencing, time-series transfer functions},
pubstate = {published},
tppubtype = {inproceedings}
}
Santos, Emanuele; Lins, Lauro; Ahrens, James; Freire, Juliana; Silva, Claudio T
Vismashup: Streamlining the creation of custom visualization applications Journal Article
In: Visualization and Computer Graphics, IEEE Transactions on, vol. 15, no. 6, pp. 1539–1546, 2009, (LA-UR-10-02240).
Abstract | Links | BibTeX | Tags: Dataflow, scientific visualization, Visualization Systems
@article{santos2009vismashup,
title = {Vismashup: Streamlining the creation of custom visualization applications},
author = {Emanuele Santos and Lauro Lins and James Ahrens and Juliana Freire and Claudio T Silva},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VISMASHUPStreamliningTheCreationOfCustomVisualizationApplications.pdf},
year = {2009},
date = {2009-01-01},
journal = {Visualization and Computer Graphics, IEEE Transactions on},
volume = {15},
number = {6},
pages = {1539--1546},
publisher = {IEEE},
abstract = {Visualization is essential for understanding the increasing volumes of digital data. However, the process required to create insightful visualizations is involved and time consuming. Although several visualization tools are available, including tools with sophisticated visual interfaces, they are out of reach for users who have little or no knowledge of visualization techniques and/or who do not have programming expertise. In this paper, we propose VISMASHUP, a new framework for streamlining the creation of customized visualization applications. Because these applications can be customized for very specific tasks, they can hide much of the complexity in a visualization specification and make it easier for users to explore visualizations by manipulating a small set of parameters. We describe the framework and how it supports the various tasks a designer needs to carry out to develop an application, from mining and exploring a set of visualization specifications (pipelines), to the creation of simplified views of the pipelines, and the automatic generation of the application and its interface. We also describe the implementation of the system and demonstrate its use in two real application scenarios.},
note = {LA-UR-10-02240},
keywords = {Dataflow, scientific visualization, Visualization Systems},
pubstate = {published},
tppubtype = {article}
}
2008
Santos, Emanuele; Lins, Lauro; Ahrens, James; Freire, Juliana; Silva, Claudio T
A first study on clustering collections of workflow graphs Book Chapter
In: Provenance and Annotation of Data and Processes, pp. 160–173, Springer, 2008, (LA-UR-10-02553).
Abstract | Links | BibTeX | Tags: clustering, workflow
@inbook{Santos2008,
title = {A first study on clustering collections of workflow graphs},
author = {Emanuele Santos and Lauro Lins and James Ahrens and Juliana Freire and Claudio T Silva},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AFirstStudyOnClusteringCollectionsOfWorkflowGraphs.pdf},
year = {2008},
date = {2008-01-01},
booktitle = {Provenance and Annotation of Data and Processes},
pages = {160--173},
publisher = {Springer},
abstract = {As workflow systems get more widely used, the number of workflows and the volume of provenance they generate has grown considerably. New tools and infrastructure are needed to allow users to interact with, reason about, and re-use this information. In this paper, we explore the use of clustering techniques to organize large collections of workflow and provenance graphs. We propose two different representations for these graphs and present an experimental evaluation, using a collection of 1,700 workflow graphs, where we study the trade-offs of these representations and the effectiveness of alternative clustering techniques.},
note = {LA-UR-10-02553},
keywords = {clustering, workflow},
pubstate = {published},
tppubtype = {inbook}
}
Ahrens, James; Lo, Li-Ta; Nouanesengsy, Boonthanome; Patchett, John; McPherson, Allen
Petascale visualization: Approaches and initial results Proceedings Article
In: Ultrascale Visualization, 2008. UltraVis 2008. Workshop on, pp. 24–28, IEEE 2008, (LA-UR-10-02237).
Abstract | Links | BibTeX | Tags: petascale, visualization
@inproceedings{ahrens2008petascale,
title = {Petascale visualization: Approaches and initial results},
author = {James Ahrens and Li-Ta Lo and Boonthanome Nouanesengsy and John Patchett and Allen McPherson},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/PetascaleVisualizationApproachesAndInitialResults.pdf},
year = {2008},
date = {2008-01-01},
booktitle = {Ultrascale Visualization, 2008. UltraVis 2008. Workshop on},
pages = {24--28},
organization = {IEEE},
abstract = {With the advent of the first petascale supercomputer, Los Alamos’s Roadrunner, there is a pressing need to address how to visualize petascale data. The crux of the petascale visualization performance problem is interactive rendering, since it is the most computationally intensive portion of the visualization process. For terascale platforms, commodity clusters with graphics processors (GPUs) have been used for interactive rendering. For petascale platforms, visualization and rendering may be able to run efficiently on the supercomputer platform itself. In this work, we evaluated the rendering performance of multi-core CPU and GPU-based processors. To achieve high-performance on multi-core processors, we tested with multi-core optimized raytracing engines for rendering. For real-world performance testing, and to prepare for petascale visualization tasks, we interfaced these rendering engines with VTK and ParaView. Initial results show that rendering software optimized for multi-core CPU processors provides competitive performance to GPUs for the parallel rendering of massive data. The current architectural multi-core trend suggests multi-core based supercomputers are able to provide interactive visualization and rendering support now and in the future.},
note = {LA-UR-10-02237},
keywords = {petascale, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Anderson, Erik; Silva, Claudio T; Ahrens, James; Heitmann, Katrin; Habib, Salman
Provenance in comparative analysis: A study in cosmology Journal Article
In: Computing in Science & Engineering, vol. 10, no. 3, pp. 30–37, 2008, (LA-UR-08-02608).
Abstract | Links | BibTeX | Tags: Comparative Analysis, cosmology, Provenance
@article{anderson2008provenance,
title = {Provenance in comparative analysis: A study in cosmology},
author = {Erik Anderson and Claudio T Silva and James Ahrens and Katrin Heitmann and Salman Habib},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ProvenanceInComparativeAnalysisAStudyInCosmology.pdf},
year = {2008},
date = {2008-01-01},
journal = {Computing in Science & Engineering},
volume = {10},
number = {3},
pages = {30--37},
publisher = {AIP Publishing},
abstract = {Provenance—the logging of information about how data came into being and how it was processed—is an essential aspect of managing large-scale simulation and data-intensive projects. Using a cosmology code comparison project as an example, this article presents how a provenance system can play a key role in such applications.},
note = {LA-UR-08-02608},
keywords = {Comparative Analysis, cosmology, Provenance},
pubstate = {published},
tppubtype = {article}
}
McCormick, Patrick; Anderson, Erik; Martin, Steven; Brownlee, Carson; Inman, Jeff; Maltrud, Mathew; Kim, Mark; Ahrens, James; Nau, Lee
Quantitatively driven visualization and analysis on emerging architectures Proceedings Article
In: Journal of Physics: Conference Series, pp. 012095, IOP Publishing 2008, (LA-UR-10-02239).
Abstract | Links | BibTeX | Tags: emerging architectures, quantitatively driven visualization, visualization
@inproceedings{mccormick2008quantitatively,
title = {Quantitatively driven visualization and analysis on emerging architectures},
author = {Patrick McCormick and Erik Anderson and Steven Martin and Carson Brownlee and Jeff Inman and Mathew Maltrud and Mark Kim and James Ahrens and Lee Nau},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/QuantitativelyDrivenVisualizationAndAnalysisOnEmergingArchitectures.pdf},
year = {2008},
date = {2008-01-01},
booktitle = {Journal of Physics: Conference Series},
volume = {125},
number = {1},
pages = {012095},
organization = {IOP Publishing},
abstract = {We live in a world of ever-increasing amounts of information that is not only dynamically changing but also dramatically changing in complexity. This trend of “information overload” has quickly overwhelmed our capabilities to explore, hypothesize, and thus fully interpret the underlying details in these data. To further complicate matters, the computer architectures that have traditionally provided improved performance are undergoing a revolutionary change as manufacturers transition to building multi- and many-core processors. While these trends have the potential to lead to new scientific breakthroughs via simulation and modeling, they will do so in a disruptive manner, potentially placing a significant strain on software development activities including the overall data analysis process. In this paper we explore an approach that exploits these emerging architectures to provide an integrated environment for high-performance data analysis and visualization.},
note = {LA-UR-10-02239},
keywords = {emerging architectures, quantitatively driven visualization, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Heitmann, Katrin; Lukic, Zarija; Fasel, Patricia; Habib, Salman; Warren, Michael S.; White, Martin; Ahrens, James; Ankeny, Lee; Armstrong, Ryan; O’Shea, Brian; Ricker, Paul M.; Springel, Volker; Stadel, Joachim; Trac, Hy
The cosmic code comparison project Journal Article
In: Computational Science & Discovery, vol. 1, no. 1, pp. 015003, 2008, (LA-UR-07-1953).
Abstract | Links | BibTeX | Tags: cosmology
@article{heitmann2008cosmic,
title = {The cosmic code comparison project},
author = {Katrin Heitmann and Zarija Lukic and Patricia Fasel and Salman Habib and Michael S. Warren and Martin White and James Ahrens and Lee Ankeny and Ryan Armstrong and Brian O’Shea and Paul M. Ricker and Volker Springel and Joachim Stadel and Hy Trac},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/TheCosmicCodeComparisonProject.pdf},
year = {2008},
date = {2008-01-01},
journal = {Computational Science & Discovery},
volume = {1},
number = {1},
pages = {015003},
publisher = {IOP Publishing},
abstract = {Current and upcoming cosmological observations allow us to probe structures on smaller and smaller scales, entering highly nonlinear regimes. In order to obtain theoretical predictions in these regimes, large cosmological simulations have to be carried out. The promised high accuracy from observations makes the simulation task very demanding: the simulations have to be at least as accurate as the observations. This requirement can only be fulfilled by carrying out an extensive code validation program. The first step of such a program is the comparison of different cosmology codes including gravitation interactions only. In this paper we extend a recently carried out code comparison project to include five more simulation codes. We restrict our analysis to a small cosmological volume which allows us to investigate properties of halos. For the matter power spectrum and the mass function, the previous results hold, with the codes agreeing at the 10\% level over wide dynamic ranges. We extend our analysis to the comparison of halo profiles and investigate the halo count as a function of local density. We introduce and discuss ParaView as a flexible analysis tool for cosmological simulations, the use of which immensely simplifies the code comparison task.},
note = {LA-UR-07-1953},
keywords = {cosmology},
pubstate = {published},
tppubtype = {article}
}
2007
Ahrens, James; Desai, Nehal; McCormick, Patrick; Martin, Ken; Woodring, Jonathan
A modular extensible visualization system architecture for culled prioritized data streaming Proceedings Article
In: Electronic Imaging 2007, pp. 64950I–64950I, International Society for Optics and Photonics 2007, (LA-UR-07-5141).
Abstract | Links | BibTeX | Tags: data streaming, visualization
@inproceedings{ahrens2007modular,
title = {A modular extensible visualization system architecture for culled prioritized data streaming},
author = {James Ahrens and Nehal Desai and Patrick McCormick and Ken Martin and Jonathan Woodring},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AModularExtensibleVisualizationSystemArchitectureForCulledPrioritizedDataStreaming.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Electronic Imaging 2007},
pages = {64950I--64950I},
organization = {International Society for Optics and Photonics},
abstract = {Massive dataset sizes can make visualization difficult or impossible. One solution to this problem is to divide a dataset into smaller pieces and then stream these pieces through memory, running algorithms on each piece. This paper presents a modular data-flow visualization system architecture for culling and prioritized data streaming. This streaming architecture improves program performance both by discarding pieces of the input dataset that are not required to complete the visualization, and by prioritizing the ones that are. The system supports a wide variety of culling and prioritization techniques, including those based on data value, spatial constraints, and occlusion tests. Prioritization ensures that pieces are processed and displayed progressively based on an estimate of their contribution to the resulting image. Using prioritized ordering, the architecture presents a progressively rendered result in a significantly shorter time than a standard visualization architecture. The design is modular, such that each module in a user-defined data-flow visualization program can cull pieces as well as contribute to the final processing order of pieces. In addition, the design is extensible, providing an interface for the addition of user-defined culling and prioritization techniques to new or existing visualization modules.},
note = {LA-UR-07-5141},
keywords = {data streaming, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
Woodring, Jonathan; Shen, Han-Wei
Incorporating highlighting animations into static visualizations Proceedings Article
In: Electronic Imaging 2007, pp. 649503–649503, International Society for Optics and Photonics 2007.
Abstract | Links | BibTeX | Tags: animation, highlighting, visualization
@inproceedings{woodring2007incorporating,
title = {Incorporating highlighting animations into static visualizations},
author = {Jonathan Woodring and Han-Wei Shen},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/IncorporatingHighlightingAnimationsIntoStaticVisualizations.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Electronic Imaging 2007},
pages = {649503--649503},
organization = {International Society for Optics and Photonics},
abstract = {Rendering a lot of data results in cluttered visualizations. It is difficult for a user to find regions of interest from contextual data especially when occlusion is considered. We incorporate animations into visualization by adding positional motion and opacity change as a highlighting mechanism. By leveraging our knowledge on motion perception, we can help a user to visually filter out her selected data by rendering it with animation. Our framework of adding animation is the animation transfer function, where it provides a mapping from data and animation frame index to a changing visual property. The animation transfer function describes animations for user selected regions of interest. In addition to our framework, we explain the implementation of animations as a modification of the rendering pipeline. The animation rendering pipeline allows us to easily incorporate animations into existing software and hardware based volume renderers.},
keywords = {animation, highlighting, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
McCormick, Patrick; Inman, Jeff; Ahrens, James; Mohd-Yusof, Jamaludin; Roth, Greg; Cummins, Sharen
Scout: a data-parallel programming language for graphics processors Journal Article
In: Parallel Computing, vol. 33, no. 10, pp. 648–662, 2007, (LA-UR-07-2094).
Abstract | Links | BibTeX | Tags: data-parallel, Graphics Systems
@article{mccormick2007scout,
title = {Scout: a data-parallel programming language for graphics processors},
author = {Patrick McCormick and Jeff Inman and James Ahrens and Jamaludin Mohd-Yusof and Greg Roth and Sharen Cummins},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ScoutADataParallelProgrammingLanguageForGraphicsProcessors.pdf},
year = {2007},
date = {2007-01-01},
journal = {Parallel Computing},
volume = {33},
number = {10},
pages = {648--662},
publisher = {Elsevier},
abstract = {Commodity graphics hardware has seen incredible growth in terms of performance, programmability, and arithmetic precision. Even though these trends have been primarily driven by the entertainment industry, the price-to-performance ratio of graphics processors (GPUs) has attracted the attention of many within the high-performance computing community. While the performance of the GPU is well suited for computational science, the programming interface, and several hardware limitations, have prevented their wide adoption. In this paper we present Scout, a data-parallel programming language for graphics processors that hides the nuances of both the underlying hardware and supporting graphics software layers. In addition to general-purpose programming constructs, the language provides extensions for scientific visualization operations that support the exploration of existing or computed data sets.},
note = {LA-UR-07-2094},
keywords = {data-parallel, Graphics Systems},
pubstate = {published},
tppubtype = {article}
}
2006
Woodring, Jonathan; Shen, Han-Wei
Multi-variate, time varying, and comparative visualization with contextual cues Journal Article
In: Visualization and Computer Graphics, IEEE Transactions on, vol. 12, no. 5, pp. 909–916, 2006.
Abstract | Links | BibTeX | Tags: comparative, focus + context, multi-variate, time-varying
@article{woodring2006multi,
  title     = {Multi-variate, time varying, and comparative visualization with contextual cues},
  author    = {Jonathan Woodring and Han-Wei Shen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Multi-variateTime-varyingAndComparativeVisualizationWithContextualCues.pdf},
  year      = {2006},
  date      = {2006-01-01},
  journal   = {Visualization and Computer Graphics, IEEE Transactions on},
  volume    = {12},
  number    = {5},
  pages     = {909--916},
  publisher = {IEEE},
  abstract  = {Time-varying, multi-variate, and comparative data sets are not easily visualized due to the amount of data that is presented to the user at once. By combining several volumes together with different operators into one visualized volume, the user is able to compare values from different data sets in space over time, run, or field without having to mentally switch between different renderings of individual data sets. In this paper, we propose using a volume shader where the user is given the ability to easily select and operate on many data volumes to create comparison relationships. The user specifies an expression with set and numerical operations and her data to see relationships between data fields. Furthermore, we render the contextual information of the volume shader by converting it to a volume tree. We visualize the different levels and nodes of the volume tree so that the user can see the results of suboperations. This gives the user a deeper understanding of the final visualization, by seeing how the parts of the whole are operationally constructed.},
  keywords  = {comparative, focus + context, multi-variate, time-varying},
  pubstate  = {published},
  tppubtype = {article}
}
Ahrens, James; Heitmann, Katrin; Habib, Salman; Ankeny, Lee; McCormick, Patrick; Inman, Jeff; Armstrong, Ryan; Ma, Kwan-Liu
Quantitative and comparative visualization applied to cosmological simulations Proceedings Article
In: Journal of Physics: Conference Series, pp. 526, IOP Publishing 2006, (LA-UR-06-4416).
Abstract | Links | BibTeX | Tags: comparative visualization, cosmology, Quantitative visualization
@inproceedings{ahrens2006quantitative,
  title        = {Quantitative and comparative visualization applied to cosmological simulations},
  author       = {James Ahrens and Katrin Heitmann and Salman Habib and Lee Ankeny and Patrick McCormick and Jeff Inman and Ryan Armstrong and Kwan-Liu Ma},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/QuantitativeAndComparativeVisualizationAppliedToCosmologicalSimulations.pdf},
  year         = {2006},
  date         = {2006-01-01},
  booktitle    = {Journal of Physics: Conference Series},
  volume       = {46},
  number       = {1},
  pages        = {526},
  organization = {IOP Publishing},
  abstract     = {Cosmological simulations follow the formation of nonlinear structure in dark and luminous matter. The associated simulation volumes and dynamic range are very large, making visualization both a necessary and challenging aspect of the analysis of these datasets. Our goal is to understand sources of inconsistency between different simulation codes that are started from the same initial conditions. Quantitative visualization supports the definition and reasoning about analytically defined features of interest. Comparative visualization supports the ability to visually study, side by side, multiple related visualizations of these simulations. For instance, a scientist can visually distinguish that there are fewer halos (localized lumps of tracer particles) in low-density regions for one simulation code out of a collection. This qualitative result will enable the scientist to develop a hypothesis, such as loss of halos in low-density regions due to limited resolution, to explain the inconsistency between the different simulations. Quantitative support then allows one to confirm or reject the hypothesis. If the hypothesis is rejected, this step may lead to new insights and a new hypothesis, not available from the purely qualitative analysis. We will present methods to significantly improve the scientific analysis process by incorporating quantitative analysis as the driver for visualization. Aspects of this work are included as part of two visualization tools, ParaView, an open-source large data visualization tool, and Scout, an analysis-language based, hardware-accelerated visualization tool.},
  note         = {LA-UR-06-4416},
  keywords     = {comparative visualization, cosmology, Quantitative visualization},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Ahrens, James; Moreland, Kenneth; Geveci, Berk; Cedilnik, Andy; Favre, Jean
Remote large data visualization in the paraview framework Proceedings Article
In: Proceedings of the 6th Eurographics conference on Parallel Graphics and Visualization, pp. 163–170, Eurographics Association 2006, (LA-UR-10-02236).
Abstract | Links | BibTeX | Tags: computer graphics, parallel processing
@inproceedings{cedilnik2006remote,
  title        = {Remote large data visualization in the paraview framework},
  author       = {James Ahrens and Kenneth Moreland and Berk Geveci and Andy Cedilnik and Jean Favre},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/RemoteLargeDataVisualizationInTheParaViewFramework.pdf},
  year         = {2006},
  date         = {2006-01-01},
  booktitle    = {Proceedings of the 6th Eurographics conference on Parallel Graphics and Visualization},
  pages        = {163--170},
  organization = {Eurographics Association},
  abstract     = {Scientists are using remote parallel computing resources to run scientific simulations to model a range of scientific problems. Visualization tools are used to understand the massive datasets that result from these simulations. A number of problems need to be overcome in order to create a visualization tool that effectively visualizes these datasets under this scenario. Problems include how to effectively process and display massive datasets and how to effectively communicate data and control information between the geographically distributed computing and visualization resources. We believe a solution that incorporates a data parallel data server, a data parallel rendering server and client controller is key. Using this data server, render server, client model as a basis, this paper describes in detail a set of integrated solutions to remote/distributed visualization problems including presenting an efficient M to N parallel algorithm for transferring geometry data, an effective server interface abstraction and parallel rendering techniques for a range of rendering modalities including tiled display walls and CAVEs.},
  note         = {LA-UR-10-02236},
  keywords     = {computer graphics, parallel processing},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
2005
Ahrens, James; Geveci, Berk; Law, Charles
ParaView: An End-User Tool for Large-Data Visualization Journal Article
In: The Visualization Handbook, pp. 717, 2005, (LA-UR-03-1560).
Abstract | Links | BibTeX | Tags: ParaView, visualization
@article{ahrens200536,
  title     = {ParaView: An End-User Tool for Large-Data Visualization},
  author    = {James Ahrens and Berk Geveci and Charles Law},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ParaView.pdf},
  year      = {2005},
  date      = {2005-01-01},
  journal   = {The Visualization Handbook},
  pages     = {717},
  publisher = {Citeseer},
  abstract  = {This paper describes the design and features of a visualization tool, called ParaView, a tool for scientists to visualize and analyze extremely large data sets. The tool provides a graphical user interface for the creation and dynamic execution of visualization tasks. ParaView transparently supports the visualization and rendering of large data sets by executing these programs in parallel on shared or distributed memory machines. ParaView supports hardware-accelerated parallel rendering and achieves interactive rendering performance via level-of-detail techniques. The design balances and integrates a number of diverse requirements including the ability to handle large data, ease of use and extensibility by developers. This paper describes the requirements that guided the design, identifies their importance to scientific users, and discusses key design decisions and tradeoffs.},
  note      = {LA-UR-03-1560},
  keywords  = {ParaView, visualization},
  pubstate  = {published},
  tppubtype = {article}
}
2004
McCormick, Patrick; Inman, Jeff; Ahrens, James; Hansen, Charles; Roth, Greg
Scout: A hardware-accelerated system for quantitatively driven visualization and analysis Proceedings Article
In: Visualization, 2004. IEEE, pp. 171–178, IEEE 2004, (LA-UR-04-7045).
Abstract | Links | BibTeX | Tags: hardware-accelerated, quantitatively driven visualization, scout
@inproceedings{mccormick2004scout,
  title        = {Scout: A hardware-accelerated system for quantitatively driven visualization and analysis},
  author       = {Patrick McCormick and Jeff Inman and James Ahrens and Charles Hansen and Greg Roth},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/ScoutAHardware-AcceleratedSystemForQuantitativelyDrivenVisualizationAndAnalysis.pdf},
  year         = {2004},
  date         = {2004-01-01},
  booktitle    = {Visualization, 2004. IEEE},
  pages        = {171--178},
  organization = {IEEE},
  abstract     = {Quantitative techniques for visualization are critical to the successful analysis of both acquired and simulated scientific data. Many visualization techniques rely on indirect mappings, such as transfer functions, to produce the final imagery. In many situations, it is preferable and more powerful to express these mappings as mathematical expressions, or queries, that can then be directly applied to the data. In this paper, we present a hardware-accelerated system that provides such capabilities and exploits current graphics hardware for portions of the computational tasks that would otherwise be executed on the CPU. In our approach, the direct programming of the graphics processor using a concise data parallel language, gives scientists the capability to efficiently explore and visualize data sets.},
  note         = {LA-UR-04-7045},
  keywords     = {hardware-accelerated, quantitatively driven visualization, scout},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
2003
Woodring, Jonathan; Shen, Han-Wei
Chronovolumes: A Direct Rendering Technique for Visualizing Time-varying Data Proceedings Article
In: Proceedings of the 2003 Eurographics/IEEE TVCG Workshop on Volume Graphics, pp. 27–34, ACM, Tokyo, Japan, 2003, ISBN: 1-58113-745-1.
Abstract | Links | BibTeX | Tags: viewing algorithms
@inproceedings{Woodring:2003:CDR:827051.827054,
  title     = {Chronovolumes: A Direct Rendering Technique for Visualizing Time-varying Data},
  author    = {Jonathan Woodring and Han-Wei Shen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Chronovolumes.pdf
http://doi.acm.org/10.1145/827051.827054},
  doi       = {10.1145/827051.827054},
  isbn      = {1-58113-745-1},
  year      = {2003},
  date      = {2003-01-01},
  booktitle = {Proceedings of the 2003 Eurographics/IEEE TVCG Workshop on Volume Graphics},
  pages     = {27--34},
  publisher = {ACM},
  address   = {Tokyo, Japan},
  series    = {VG '03},
  abstract  = {We present a new method for displaying time varying volumetric data. The core of the algorithm is an integration through time producing a single view volume that captures the essence of multiple time steps in a sequence. The resulting view volume then can be viewed with traditional raycasting techniques. With different time integration functions, we can generate several kinds of resulting chronovolumes, which illustrate differing types of time varying features to the user. By utilizing graphics hardware and texture memory, the integration through time can be sped up, allowing the user interactive control over the temporal transfer function and exploration of the data.},
  keywords  = {viewing algorithms},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Woodring, Jonathan; Wang, Chaoli; Shen, Han-Wei
High dimensional direct rendering of time-varying volumetric data Proceedings Article
In: IEEE, 2003.
Abstract | Links | BibTeX | Tags: hyperprojection, hyperslice, integration operator, raycasting, time-varying data, transfer function, volume rendering
@inproceedings{Woodring2003,
  title     = {High dimensional direct rendering of time-varying volumetric data},
  author    = {Jonathan Woodring and Chaoli Wang and Han-Wei Shen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/HighDimensionalDirectRenderingOfTime-VaryingVolumetricData.pdf},
  year      = {2003},
  date      = {2003-01-01},
  publisher = {IEEE},
  abstract  = {We present an alternative method for viewing time-varying volumetric data. We consider such data as a four-dimensional data field, rather than considering space and time as separate entities. If we treat the data in this manner, we can apply high dimensional slicing and projection techniques to generate an image hyperplane. The user is provided with an intuitive user interface to specify arbitrary hyperplanes in 4D, which can be displayed with standard volume rendering techniques. From the volume specification, we are able to extract arbitrary hyperslices, combine slices together into a hyperprojection volume, or apply a 4D raycasting method to generate the same results. In combination with appropriate integration operators and transfer functions, we are able to extract and present different space-time features to the user.},
  keywords  = {hyperprojection, hyperslice, integration operator, raycasting, time-varying data, transfer function, volume rendering},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Stompel, Aleksander; Ma, Kwan-Liu; Lum, Eric B; Ahrens, James; Patchett, John
SLIC: scheduled linear image compositing for parallel volume rendering Proceedings Article
In: Proceedings of the 2003 IEEE Symposium on Parallel and Large-Data Visualization and Graphics, pp. 6, IEEE Computer Society 2003, (LA-UR-03-5482).
Abstract | Links | BibTeX | Tags: high-performance computing, image compositing, parallel rendering, PC clusters, visualization, volume rendering
@inproceedings{stompel2003slic,
  title        = {SLIC: scheduled linear image compositing for parallel volume rendering},
  author       = {Aleksander Stompel and Kwan-Liu Ma and Eric B Lum and James Ahrens and John Patchett},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/SLICScheduledLinearImageCmpositingForParallelVolumeRendering.pdf},
  year         = {2003},
  date         = {2003-01-01},
  booktitle    = {Proceedings of the 2003 IEEE Symposium on Parallel and Large-Data Visualization and Graphics},
  pages        = {6},
  organization = {IEEE Computer Society},
  abstract     = {Parallel volume rendering offers a feasible solution to the large data visualization problem by distributing both the data and rendering calculations among multiple computers connected by a network. In sort-last parallel volume rendering, each processor generates an image of its assigned subvolume, which is blended together with other images to derive the final image. Improving the efficiency of this compositing step, which requires interprocesssor communication, is the key to scalable, interactive rendering. The recent trend of using hardware-accelerated volume rendering demands further acceleration of the image compositing step. This paper presents a new optimized parallel image compositing algorithm and its performance on a PC cluster. Our test results show that this new algorithm offers significant savings over previous algorithms in both communication and compositing costs. On a 64-node PC cluster with a 100BaseT network interconnect, we can achieve interactive rendering rates for images at resolutions up to 1024 × 1024 pixels at several frames per second.},
  note         = {LA-UR-03-5482},
  keywords     = {high-performance computing, image compositing, parallel rendering, PC clusters, visualization, volume rendering},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
2001
Kniss, Joe; McCormick, Patrick; McPherson, Allen; Ahrens, James; Painter, James; Keahey, Alan; Hansen, Charles
TRex: Interactive Texture Based Volume Rendering for Extremely Large Datasets Journal Article
In: 2001, (LA-UR-01-1723).
Abstract | Links | BibTeX | Tags: interactive, texture, volume rendering
@article{knissytrex,
  title     = {TRex: Interactive Texture Based Volume Rendering for Extremely Large Datasets},
  author    = {Joe Kniss and Patrick McCormick and Allen McPherson and James Ahrens and James Painter and Alan Keahey and Charles Hansen},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/TRexInteractiveTextureBasedVolumeRenderingForExtremelyLargeDatasets.pdf},
  year      = {2001},
  date      = {2001-08-01},
  abstract  = {Many of today’s scientific simulations are capable of producing terabytes to petabytes of data. Visualization plays a critical role in understanding and analyzing the results of these simulations. Hardware accelerated direct volume rendering has proven to be an excellent visualization modality for both scientific and medical data sets. Current graphics hardware implementations limit the size of interactive datasets to sizes that are orders of magnitude smaller than these datasets. We present a scalable system which takes advantage of parallel graphics hardware, software based compositing, and high performance I/O. The goals of our application are to provide near interactive display rates for terabyte sized, time-varying, datasets and allow moderately sized datasets to be visualized in virtual environments. We also present a novel set of direct manipulation widgets for interacting with, and querying, the visualization.},
  note      = {LA-UR-01-1723},
  keywords  = {interactive, texture, volume rendering},
  pubstate  = {published},
  tppubtype = {article}
}
Law, Charles; Henderson, Amy; Ahrens, James
An application architecture for large data visualization: a case study Proceedings Article
In: Proceedings of the IEEE 2001 symposium on parallel and large-data visualization and graphics, pp. 125–128, IEEE Press 2001, (LA-UR-01-5530).
Abstract | Links | BibTeX | Tags: application architecture, visualization
@inproceedings{law2001application,
  title        = {An application architecture for large data visualization: a case study},
  author       = {Charles Law and Amy Henderson and James Ahrens},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AnApplicationFarhitectureForLargeDataVisualizationACaseStudy.pdf},
  year         = {2001},
  date         = {2001-01-01},
  booktitle    = {Proceedings of the IEEE 2001 symposium on parallel and large-data visualization and graphics},
  pages        = {125--128},
  organization = {IEEE Press},
  abstract     = {In this case study we present an open-source visualization application with a data-parallel novel application architecture. The architecture is unique because it uses the Tcl scripting language to synchronize the user interface with the VTK parallel visualization pipeline and parallel-rendering module. The resulting application shows scalable performance, and is easily extendable because of its simple modular architecture. We demonstrate the application with a 9.8 gigabyte structured-grid ocean model.},
  note         = {LA-UR-01-5530},
  keywords     = {application architecture, visualization},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Ahrens, James; Brislawn, Kristi; Martin, Ken; Geveci, Berk; Law, Charles; Papka, Michael
Large-scale data visualization using parallel data streaming Journal Article
In: Computer Graphics and Applications, IEEE, vol. 21, no. 4, pp. 34–41, 2001, (LA-UR-01-0970).
Abstract | Links | BibTeX | Tags: data streaming, LargeScaleVisualization, MPI, ParallelVisualization, VTK
@article{ahrens2001large,
  title     = {Large-scale data visualization using parallel data streaming},
  author    = {James Ahrens and Kristi Brislawn and Ken Martin and Berk Geveci and Charles Law and Michael Papka},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/LargeScaleDataVisualizationUsingParallelDataStreaming.pdf},
  year      = {2001},
  date      = {2001-01-01},
  journal   = {Computer Graphics and Applications, IEEE},
  volume    = {21},
  number    = {4},
  pages     = {34--41},
  publisher = {IEEE},
  abstract  = {Effective large-scale data visualization remains a significant and important challenge with analysis codes already producing terabyte results on clusters with thousands of processors. Frequently the analysis codes produce distributed data and consume a significant portion of the available memory per node. This paper presents an architectural approach to handling these visualization problems based on mixed dataset topology parallel data streaming. This enables visualizations on a parallel cluster that would normally require more storage/memory than is available while at the same time achieving high code reuse. Results from a variety of hardware and visualization configurations are discussed with data sizes ranging near to a petabyte.},
  note      = {LA-UR-01-0970},
  keywords  = {data streaming, LargeScaleVisualization, MPI, ParallelVisualization, VTK},
  pubstate  = {published},
  tppubtype = {article}
}
Keahey, Alan; McCormick, Patrick; Ahrens, James; Keahey, Katarzyna
Qviz: a framework for querying and visualizing data Proceedings Article
In: Photonics West 2001-Electronic Imaging, pp. 259–267, International Society for Optics and Photonics 2001, (LA-UR-00-6116).
Abstract | Links | BibTeX | Tags: analytical queries, data visualization, multivariate visualization, parallel processing
@inproceedings{keahey2001qviz,
  title        = {Qviz: a framework for querying and visualizing data},
  author       = {Alan Keahey and Patrick McCormick and James Ahrens and Katarzyna Keahey},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/QvizAFrameworkForQueryingAndVisualizaingData.pdf},
  year         = {2001},
  date         = {2001-01-01},
  booktitle    = {Photonics West 2001-Electronic Imaging},
  pages        = {259--267},
  organization = {International Society for Optics and Photonics},
  abstract     = {Qviz is a lightweight, modular, and easy to use parallel system for interactive analytical query processing and visual presentation of large datasets. Qviz allows queries of arbitrary complexity to be easily constructed using a specialized scripting language. Visual presentation of the results is also easily achieved via simple scripted and interactive commands to our query-specific visualization tools. This paper describes our initial experiences with the Qviz system for querying and visualizing scientific datasets, showing how Qviz has been used in two different applications: ocean modeling and linear accelerator simulations.},
  note         = {LA-UR-00-6116},
  keywords     = {analytical queries, data visualization, multivariate visualization, parallel processing},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
2000
Keahey, Katarzyna; Beckman, Peter; Ahrens, James
Ligature: Component architecture for high performance applications Journal Article
In: International Journal of High Performance Computing Applications, vol. 14, no. 4, pp. 347–356, 2000, (LA-UR-00-1519).
Abstract | Links | BibTeX | Tags: component architecture
@article{keahey2000ligature,
  title     = {Ligature: Component architecture for high performance applications},
  author    = {Katarzyna Keahey and Peter Beckman and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/LigatureComponentArchitectureForHigh-PerformanceApplications.pdf},
  year      = {2000},
  date      = {2000-01-01},
  journal   = {International Journal of High Performance Computing Applications},
  volume    = {14},
  number    = {4},
  pages     = {347--356},
  publisher = {SAGE Publications},
  abstract  = {The increasing feasibility of developing applications spanning nationwide supercomputing resources makes possible the creation of simulations composed of multiple interdisciplinary components and capable of modeling natural and social phenomena of national importance with unprecedented speed and accuracy. However, the potential offered by hardware technology often fails to be fully realized due to the lack of software environments supporting such efforts. Furthermore, the complexity of combining within one application components with different performance characteristics often prevents such applications from achieving required performance levels. The Ligature project at LANL addresses the issue of designing a software infrastructure enabling fast and efficient development of multi-component applications, and that of providing performance guidance to the programmer using this infrastructure. Ligature allows the programmer to define component interfaces specifying how heterogeneous, distributed components can interact within a larger system and provides a reusable infrastructure capable of connecting these components. These interfaces, as well as information about component performance are accessible through a database. Within this framework we are trying to understand how information about the performance of individual components, and information about performance of the framework can be combined to develop a performance-aware multi-component application.},
  note      = {LA-UR-00-1519},
  keywords  = {component architecture},
  pubstate  = {published},
  tppubtype = {article}
}
Ahrens, James; Law, Charles; Schroeder, Will; Martin, Ken; Papka, Michael
A Parallel Approach for Efficiently Visualizing Extremely Large, Time-Varying Datasets. Technical Report
2000, (LA-UR-00-1620).
Abstract | Links | BibTeX | Tags: large datasets, visualization
@techreport{info:lanl-repo/lareport/LA-UR-00-1620,
  title        = {A Parallel Approach for Efficiently Visualizing Extremely Large, Time-Varying Datasets.},
  author       = {James Ahrens and Charles Law and Will Schroeder and Ken Martin and Michael Papka},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2017/09/LA-UR-00-1620.pdf},
  year         = {2000},
  date         = {2000-01-01},
  abstract     = {A significant unsolved problem in scientific visualization is how to efficiently visualize extremely large time-varying datasets. Using parallelism provides a promising solution. One drawback of this approach is the high overhead and specialized knowledge often required to create parallel visualization programs. In this paper, we present a parallel visualization system that is scalable, portable and encapsulates parallel programming details for its users. Our approach was to augment an existing visualization system, the visualization toolkit (VTK). Process and communication abstractions were added in order to support task, pipeline and data parallelism. The resulting system allows users to quickly write parallel visualization programs and avoid rewriting these programs when porting to new platforms. The performance of a collection of parallel visualization programs written using this system and run on both a cluster of SGI Origin 2000s and a Linux-based PC cluster is presented. In addition to showing the utility of our approach, the results offer a comparison of the performance of commodity-based computing clusters.},
  howpublished = {IEEE/VISUALIZATION CONF. ; 200010 ; SALT LAKE CITY},
  note         = {LA-UR-00-1620},
  keywords     = {large datasets, visualization},
  pubstate     = {published},
  tppubtype    = {techreport}
}
1998
Ahrens, James; Painter, James
Efficient sort-last rendering using compression-based image compositing Proceedings Article
In: Proceedings of the 2nd Eurographics Workshop on Parallel Graphics and Visualization, pp. 145–151, Citeseer 1998, (LA-UR-98-2968).
Abstract | Links | BibTeX | Tags: image compositing, sort-last rendering
@inproceedings{ahrens1998efficient,
  title        = {Efficient sort-last rendering using compression-based image compositing},
  author       = {James Ahrens and James Painter},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/EfficientSort-LastRenderingUsingCompression-BasedImageCompositing.pdf},
  year         = {1998},
  date         = {1998-01-01},
  booktitle    = {Proceedings of the 2nd Eurographics Workshop on Parallel Graphics and Visualization},
  pages        = {145--151},
  organization = {Citeseer},
  abstract     = {State of the art scientific simulations are currently working with data set sizes on the order of a billion cells. Parallel rendering is a promising approach for interactively visualizing multiple isosurface variables from data sets of this magnitude. In sort-last rendering, each processor creates a depth buffered image of its assigned objects. All processors’ images are composited together to create a final result. Improving the efficiency of this compositing step is key to interactive parallel rendering. This paper presents a compression-based image compositing algorithm which can provide significant savings in both communication and compositing costs.},
  note         = {LA-UR-98-2968},
  keywords     = {image compositing, sort-last rendering},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
McCormick, Patrick; Ahrens, James
Visualization of wildfire simulations Journal Article
In: Computer Graphics and Applications, IEEE, vol. 18, no. 2, pp. 17–19, 1998, (LA-UR-98-0646).
Abstract | Links | BibTeX | Tags: simulation, simulation output analysis, wildfire
@article{mccormick1998visualization,
  title     = {Visualization of wildfire simulations},
  author    = {Patrick McCormick and James Ahrens},
  url       = {http://datascience.dsscale.org/wp-content/uploads/2016/06/VisualizationOfWildfireSimulations.pdf},
  year      = {1998},
  date      = {1998-01-01},
  journal   = {Computer Graphics and Applications, IEEE},
  volume    = {18},
  number    = {2},
  pages     = {17--19},
  publisher = {IEEE},
  abstract  = {Newspaper headlines constantly remind us of the human and property losses we suffer from wildfires, severe storms, earthquakes, and other natural disasters. These disasters cost the United States hundreds of lives and billions of dollars annually. Scientists at Los Alamos National Laboratory are developing computer models to predict the evolution of such disasters. Predicting the course of these events in faster than real time permits developing management strategies to minimize their adverse consequences. Presently, the complexity of models that forecast crises requires the advanced computing systems available at Los Alamos. In the near future, these models will be adapted for use in planning, training, and operational situations, but will still require advanced computing systems to run.},
  note      = {LA-UR-98-0646},
  keywords  = {simulation, simulation output analysis, wildfire},
  pubstate  = {published},
  tppubtype = {article}
}
1997
Ahrens, James; McCormick, Patrick; Bossert, James; Reisner, Jon; Winterkamp, Judith
Case study: Wildfire visualization Proceedings Article
In: Visualization'97., Proceedings, pp. 451–454, IEEE 1997, (LA-UR-97-2761).
Abstract | Links | BibTeX | Tags: simulation, simulation output analysis, wildfire
@inproceedings{ahrens1997case,
  title        = {Case study: Wildfire visualization},
  author       = {James Ahrens and Patrick McCormick and James Bossert and Jon Reisner and Judith Winterkamp},
  url          = {http://datascience.dsscale.org/wp-content/uploads/2016/06/CaseStudyWildfireVisualization.pdf},
  year         = {1997},
  date         = {1997-01-01},
  booktitle    = {Visualization'97., Proceedings},
  pages        = {451--454},
  organization = {IEEE},
  abstract     = {The ability to forecast the progress of crisis events would significantly reduce human suffering and loss of life, the destruction of property, and expenditures for assessment and recovery. Los Alamos National Laboratory has established a scientific thrust in crisis forecasting to address this national challenge. In the initial phase of this project, scientists at Los Alamos are developing computer models to predict the spread of a wildfire. Visualization of the results of the wildfire simulation will be used by scientists to assess the quality of the simulation and eventually by fire personnel as a visual forecast of the wildfire’s evolution. The fire personnel and scientists want the visualization to look as realistic as possible without compromising scientific accuracy. This paper describes how the visualization was created, analyzes the tools and approach that was used, and suggests directions for future work and research.},
  note         = {LA-UR-97-2761},
  keywords     = {simulation, simulation output analysis, wildfire},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
1996
Jakobovits, Rex; Lewis, Lara; Ahrens, James; Shapiro, Linda; Tanimoto, Steven; Brinkley, James
A visual database environment for scientific research Journal Article
In: Journal of Visual Languages & Computing, vol. 7, no. 4, pp. 361–375, 1996, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: visual database
@article{jakobovits1996visual,
title = {A visual database environment for scientific research},
author = {Rex Jakobovits and Lara Lewis and James Ahrens and Linda Shapiro and Steven Tanimoto and James Brinkley},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AVisualDatabaseEnvironmentForScientificResearch.pdf},
year = {1996},
date = {1996-01-01},
journal = {Journal of Visual Languages \& Computing},
volume = {7},
number = {4},
pages = {361--375},
publisher = {Elsevier},
abstract = {This paper describes a visual database environment designed to be used for scientific research in the imaging sciences. It provides hierarchical relational structures that allow the user to model data as entities possessing properties, parts and relationships, and it supports multi-level queries on these structures. A schema constructor interface allows users to define for each structure, not only its components, but also its visualization, which is built from its components using graphical primitives. Finally, an experiment management subsystem allows users to construct and run computational experiments that apply imaging operators to data from the database. The experiment management system keeps track of the experimental procedures developed by the user and the results generated by executing these procedures.},
note = {LA-UR-pending},
keywords = {visual database},
pubstate = {published},
tppubtype = {article}
}
1995
Ahrens, James; Hansen, Charles
Cost-effective data-parallel load balancing Proceedings Article
In: ICPP (2), pp. 218–221, 1995, (LA-UR-95-1462).
Abstract | Links | BibTeX | Tags: data-parallel, load balancing
@inproceedings{ahrens1995cost,
title = {Cost-effective data-parallel load balancing},
author = {James Ahrens and Charles Hansen},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/Cost-EffectiveData-ParallelLoadBalancing.pdf},
year = {1995},
date = {1995-01-01},
booktitle = {ICPP (2)},
pages = {218--221},
abstract = {Load balancing algorithms improve a program’s performance on unbalanced datasets, but can degrade performance on balanced datasets, because unnecessary load redistributions occur. This paper presents a cost-effective data-parallel load balancing algorithm which performs load redistributions only when the possible savings outweigh the redistribution costs. Experiments with a data-parallel polygon renderer show a performance improvement of up to a factor of 33 on unbalanced datasets and a maximum performance loss of only 27 percent on balanced datasets when using this algorithm.},
note = {LA-UR-95-1462},
keywords = {data-parallel, load balancing},
pubstate = {published},
tppubtype = {inproceedings}
}
1994
Shapiro, Linda; Tanimoto, Steven; Brinkley, James; Ahrens, James; Jakobovits, Rex; Lewis, Lara
A visual database system for data and experiment management in model-based computer vision Proceedings Article
In: CAD-Based Vision Workshop, 1994., Proceedings of the 1994 Second, pp. 64–72, IEEE 1994, (LA-UR-pending).
Abstract | Links | BibTeX | Tags: model-based computer vision, visual database
@inproceedings{shapiro1994visual,
title = {A visual database system for data and experiment management in model-based computer vision},
author = {Linda Shapiro and Steven Tanimoto and James Brinkley and James Ahrens and Rex Jakobovits and Lara Lewis},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/AVisualDatabaseSystemForDataAndExperimentManagementInModel-BasedComputerVision.pdf},
year = {1994},
date = {1994-01-01},
booktitle = {CAD-Based Vision Workshop, 1994., Proceedings of the 1994 Second},
pages = {64--72},
organization = {IEEE},
abstract = {Computer vision researchers work with many different forms of data. Model-based vision systems work with geometric models of 3D objects, intensity or range images, and many different kinds of features that are extracted from these images. The recognition/pose estimation process involves a number of different steps and different operations all of which take in and generate various forms of data. Figure 1 illustrates the operations and data types required for a sample recognition process (Shapiro, Neal, and Ponder; 1992). The process starts with a gray-scale image and produces an edge image, a line segment structure, and a triple chain structure (described in Section 2). Each object in the model database is represented by a set of its major views, and each major view is represented by a triple chain structure. The triple chain structure that was extracted from the image and the set of triple chain structures representing the major views (view classes) are input to the matching algorithm which tries to identify the view class or classes that most closely match the view in the image. This process illustrates the kind of experiments that model-based vision researchers perform; some are simpler than the one shown, and some are much more complex.},
internal-note = {review: abstract ending was truncated in the source ("that modelare simpler"); reconstructed conservatively -- verify against the original paper},
note = {LA-UR-pending},
keywords = {model-based computer vision, visual database},
pubstate = {published},
tppubtype = {inproceedings}
}
1993
Ortega, Frank; Hansen, Charles; Ahrens, James
Fast Data Parallel Polygon Rendering Proceedings Article
In: Proceedings of the 1993 ACM/IEEE Conference on Supercomputing, pp. 709–718, ACM, Portland, Oregon, USA, 1993, ISBN: 0-8186-4340-4, (LA-UR-93-3173).
Abstract | Links | BibTeX | Tags: data-parallel, polygon rendering
@inproceedings{Ortega:1993:FDP:169627.169820,
title = {Fast Data Parallel Polygon Rendering},
author = {Frank Ortega and Charles Hansen and James Ahrens},
url = {http://datascience.dsscale.org/wp-content/uploads/2016/06/FastDataParellelPolygonRendering.pdf},
doi = {10.1145/169627.169820},
isbn = {0-8186-4340-4},
year = {1993},
date = {1993-01-01},
booktitle = {Proceedings of the 1993 ACM/IEEE Conference on Supercomputing},
pages = {709--718},
publisher = {ACM},
address = {Portland, Oregon, USA},
series = {Supercomputing '93},
abstract = {This paper describes a parallel method for polygonal rendering on a massively parallel SIMD machine. This method, based on a simple shading model, is targeted for applications which require very fast polygon rendering for extremely large sets of polygons such as is found in many scientific visualization applications. The algorithms described in this paper are incorporated into a library of 3D graphics routines written for the Connection Machine. The routines are implemented on both the CM-200 and the CM-5. This library enables a scientist to display 3D shaded polygons directly from a parallel machine without the need to transmit huge amounts of data to a post-processing rendering system.},
internal-note = {review: abstract was OCR-garbled in the source; reconstructed -- verify against the original paper. Dropped the second URL (ACM resolver) from the url field; it is redundant with the doi field},
note = {LA-UR-93-3173},
keywords = {data-parallel, polygon rendering},
pubstate = {published},
tppubtype = {inproceedings}
}
0000
Sane, Sudhanshu; Yenpure, Abhishek; Bujack, Roxana; Larsen, Matthew; Moreland, Ken; Garth, Christoph; Johnson, Chris; Childs, Hank
Scalable In Situ Computation of Lagrangian Representations via Local Flow Maps Journal Article
In: 0000.
Abstract | Links | BibTeX | Tags:
@article{osti_1808167,
title = {Scalable In Situ Computation of Lagrangian Representations via Local Flow Maps},
author = {Sudhanshu Sane and Abhishek Yenpure and Roxana Bujack and Matthew Larsen and Ken Moreland and Christoph Garth and Chris Johnson and Hank Childs},
url = {http://www.informatik.uni-leipzig.de/~bujack/2021EGPGV.pdf},
abstract = {In situ computation of Lagrangian flow maps to enable post hoc time-varying vector field analysis has recently become an active area of research. However, the current literature is largely limited to theoretical settings and lacks a solution to address scalability of the technique in distributed memory. To improve scalability, we propose and evaluate the benefits and limitations of a simple, yet novel, performance optimization. Our proposed optimization is a communication-free model resulting in local Lagrangian flow maps, requiring no message passing or synchronization between processes, intrinsically improving scalability, and thereby reducing overall execution time and alleviating the encumbrance placed on simulation codes from communication overheads. To evaluate our approach, we computed Lagrangian flow maps for four time-varying simulation vector fields and investigated how execution time and reconstruction accuracy are impacted by the number of GPUs per compute node, the total number of compute nodes, particles per rank, and storage intervals. Our study consisted of experiments computing Lagrangian flow maps with up to 67M particle trajectories over 500 cycles and used as many as 2048 GPUs across 512 compute nodes. In all, our study contributes an evaluation of a communication-free model as well as a scalability study of computing distributed Lagrangian flow maps at scale using in situ infrastructure on a modern supercomputer.},
internal-note = {review: year/date and venue fields are missing (the listing renders this entry as "0000"); the PDF filename suggests EGPGV 2021 -- verify and complete},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana
Discussion and Visualization of Distinguished Hyperbolic Trajectories as a Generalization of Critical Points to 2D Time-dependent Flow Proceedings Article
In: 2022 Topological Data Analysis and Visualization (TopoInVis), pp. 59-69, 2022.
@inproceedings{9975815,
title = {Discussion and Visualization of Distinguished Hyperbolic Trajectories as a Generalization of Critical Points to 2D Time-dependent Flow},
author = {Roxana Bujack},
url = {http://www.informatik.uni-leipzig.de/~bujack/2022topoInVis.pdf},
doi = {10.1109/TopoInVis57755.2022.00013},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 Topological Data Analysis and Visualization (TopoInVis)},
pages = {59--69},
abstract = {Classical vector field topology has proven to be a useful visualization technique for steady flow, but its straightforward application to time-dependent flows lacks physical meaning. Necessary requirements for physical meaningfulness include the results to be objective, i.e., independent of the frame of reference of the observer, and Lagrangian, i.e., that the generalized critical points are trajectories. We analyze whether the theoretical concept of distinguished hyperbolic trajectories provides a physically meaningful generalization to classical critical points and if the existing extraction algorithms correctly compute what has been defined mathematically. We show that both theory and algorithms constitute a significant improvement over previous methods.We further present a method to visualize a time-dependent flow field in the reference frames of distinguished trajectories. The result is easy to interpret because it makes these trajectories look like classical critical points for each instance in time, but it is meaningful because it is Lagrangian and objective.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Teti, Emily; Turton, Terece; Miller, Jonah; Bujack, Roxana
Maximum likelihood estimation of difference scaling functions for suprathreshold judgments Journal Article
In: Journal of Vision, vol. 22, no. 10, pp. 9-9, 2022, ISSN: 1534-7362.
@article{10.1167/jov.22.10.9,
title = {Maximum likelihood estimation of difference scaling functions for suprathreshold judgments},
author = {Emily Teti and Terece Turton and Jonah Miller and Roxana Bujack},
url = {https://jov.arvojournals.org/article.aspx?articleid=2783632},
doi = {10.1167/jov.22.10.9},
issn = {1534-7362},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Journal of Vision},
volume = {22},
number = {10},
pages = {9--9},
abstract = {Maximum likelihood estimation (MLE) has been used to produce perceptual scales from binary judgments of triads and quadruples. This method relies on Thurstone’s theory of a stochastic perceptual process where the perceived difference of two stimuli is the difference in their perceived strengths. It is possible that the perception of a suprathreshold difference is overestimated when adding smaller differences, a phenomenon referred to as diminishing returns. The current approach to construct a perceptual scale using MLE does not account for this phenomenon. We present a way to model the perception of differences using MLE and Thurstone’s theory, adapted to allow the possibility of diminishing returns. This method is validated using Monte Carlo simulated responses to experimental triads and can correctly model diminishing returns, the absence of diminishing returns, and the opposite of diminishing returns both in the cases when a perceptual scale is known and when the true perceived strengths of the stimuli are unknown. Additionally, this method was applied to empirical data sets to determine its feasibility in investigations of perception. Ultimately, it was found that this analysis allows for more accurate modeling of suprathreshold difference judgments, a more complete understanding of the perceptual processes underlying comparisons, and the evaluation of Thurstone’s theory of difference judgments.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Teti, Emily; Miller, Jonah; Caffrey, Elektra; Turton, Terece
The non-Riemannian nature of perceptual color space Journal Article
In: Proceedings of the National Academy of Sciences, vol. 119, no. 18, pp. e2119753119, 2022.
@article{doi:10.1073/pnas.2119753119,
title = {The non-Riemannian nature of perceptual color space},
author = {Roxana Bujack and Emily Teti and Jonah Miller and Elektra Caffrey and Terece Turton},
url = {https://www.pnas.org/doi/abs/10.1073/pnas.2119753119},
doi = {10.1073/pnas.2119753119},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Proceedings of the National Academy of Sciences},
volume = {119},
number = {18},
pages = {e2119753119},
abstract = {The scientific community generally agrees on the theory, introduced by Riemann and furthered by Helmholtz and Schrödinger, that perceived color space is not Euclidean but rather, a three-dimensional Riemannian space. We show that the principle of diminishing returns applies to human color perception. This means that large color differences cannot be derived by adding a series of small steps, and therefore, perceptual color space cannot be described by a Riemannian geometry. This finding is inconsistent with the current approaches to modeling perceptual color space. Therefore, the assumed shape of color space requires a paradigm shift. Consequences of this apply to color metrics that are currently used in image and video processing, color mapping, and the paint and textile industries. These metrics are valid only for small differences. Rethinking them outside of a Riemannian setting could provide a path to extending them to large differences. This finding further hints at the existence of a second-order Weber–Fechner law describing perceived differences.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Zhang, Xinhua; Suk, Tomáš; Rogers, David
Systematic generation of moment invariant bases for 2D and 3D tensor fields Journal Article
In: Pattern Recognition, vol. 123, pp. 108313, 2022, ISSN: 0031-3203.
@article{BUJACK2022108313,
title = {Systematic generation of moment invariant bases for 2D and 3D tensor fields},
author = {Roxana Bujack and Xinhua Zhang and Tomáš Suk and David Rogers},
url = {https://www.sciencedirect.com/science/article/pii/S0031320321004933},
doi = {10.1016/j.patcog.2021.108313},
issn = {0031-3203},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Pattern Recognition},
volume = {123},
pages = {108313},
abstract = {Moment invariants have been successfully applied to pattern detection tasks in 2D and 3D scalar, vector, and matrix valued data. However so far no flexible basis of invariants exists, i.e., no set that is optimal in the sense that it is complete and independent for every input pattern. In this paper, we prove that a basis of moment invariants can be generated that consists of tensor contractions of not more than two different moment tensors each under the conjecture of the set of all possible tensor contractions to be complete. This result allows us to derive the first generator algorithm that produces flexible bases of moment invariants with respect to orthogonal transformations by selecting a single non-zero moment to pair with all others in these two-factor products. Since at least one non-zero moment can be found in every non-zero pattern, this approach always generates a complete set of descriptors.},
internal-note = {review: this key also appears earlier in the file (repeated entry); BibTeX will warn -- deduplicate},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Bresciani, Etienne; Waters, Jiajia; Schroeder, Will
Topological Segmentation of 2D Vector Fields Journal Article
In: 2022, (LEVIA'22. Leipzig, 06.04.2022 - 07.04.2022).
@article{bujack2022topological,
title = {Topological Segmentation of 2D Vector Fields},
author = {Roxana Bujack and Etienne Bresciani and Jiajia Waters and Will Schroeder},
url = {http://www.informatik.uni-leipzig.de/~bujack/2022Levia.pdf},
doi = {10.36730/2022.1.levia.5},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
abstract = {Vector field topology has a long tradition as a visualization tool. The separatrices segment the domain visually into canonical regions in which all streamlines behave qualitatively the same. But application scientists often need more than just a nice image for their data analysis, and, to best of our knowledge, so far no workflow has been proposed to extract the critical points, the associated separatrices, and then provide the induced segmentation on the data level. We present a workflow that computes the segmentation of the domain of a 2D vector field based on its separatrices. We show how it can be used for the extraction of quantitative information about each segment in two applications: groundwater flow and heat exchange.},
internal-note = {review: this key also appears earlier in the file (repeated entry); BibTeX will warn -- deduplicate},
note = {LEVIA'22. Leipzig, 06.04.2022 - 07.04.2022},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Nardini, Pascal; Chen, Min; Böttinger, Michael; Scheuermann, Gerik; Bujack, Roxana
Automatic Improvement of Continuous Colormaps in Euclidean Colorspaces Journal Article
In: Computer Graphics Forum, vol. 40, no. 3, pp. 361-373, 2021.
@article{https://doi.org/10.1111/cgf.14313,
title = {Automatic Improvement of Continuous Colormaps in Euclidean Colorspaces},
author = {Pascal Nardini and Min Chen and Michael Böttinger and Gerik Scheuermann and Roxana Bujack},
url = {http://www.informatik.uni-leipzig.de/~bujack/2021EuroVis.pdf},
doi = {10.1111/cgf.14313},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {Computer Graphics Forum},
volume = {40},
number = {3},
pages = {361--373},
abstract = {Colormapping is one of the simplest and most widely used data visualization methods within and outside the visualization community. Uniformity, order, discriminative power, and smoothness of continuous colormaps are the most important criteria for evaluating and potentially improving colormaps. We present a local and a global automatic optimization algorithm in Euclidean color spaces for each of these design rules in this work. As a foundation for our optimization algorithms, we used the CCC-Tool colormap specification (CMS); each algorithm has been implemented in this tool. In addition to synthetic examples that demonstrate each method's effect, we show the outcome of some of the methods applied to a typhoon simulation.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Tsai, Karen; Morley, Steven; Bresciani, Etienne
Open source vector field topology Journal Article
In: SoftwareX, vol. 15, pp. 100787, 2021, ISSN: 2352-7110.
@article{BUJACK2021100787,
title = {Open source vector field topology},
author = {Roxana Bujack and Karen Tsai and Steven Morley and Etienne Bresciani},
url = {http://www.informatik.uni-leipzig.de/~bujack/2021SoftwareX.pdf},
doi = {10.1016/j.softx.2021.100787},
issn = {2352-7110},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {SoftwareX},
volume = {15},
pages = {100787},
abstract = {A myriad of physical phenomena, such as fluid flows, magnetic fields, and population dynamics are described by vector fields. More often than not, vector fields are complex and their analysis is challenging. Vector field topology is a powerful analysis technique that consists in identifying the most essential structure of a vector field. Its topological features include critical points and separatrices, which segment the domain into regions of coherent flow behavior, provide a sparse and semantically meaningful representation of the underlying data. However, a broad adoption of this formidable technique has been hampered by the lack of open source software implementing it. The Visualization Toolkit (VTK) now contains the filter vtkVectorFieldTopology that extracts the topological skeleton of 2D and 3D vector fields. This paper describes our implementation and demonstrates its broad applicability with two real-world examples from hydrology and space physics.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Nardini, Pascal; Chen, Min; Bujack, Roxana; Bottinger, Michael; Scheuermann, Gerik
A Testing Environment for Continuous Colormaps Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 27, no. 2, pp. 1043-1053, 2021.
@article{9216559,
title = {A Testing Environment for Continuous Colormaps},
author = {Pascal Nardini and Min Chen and Roxana Bujack and Michael Böttinger and Gerik Scheuermann},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020Vis.pdf},
doi = {10.1109/TVCG.2020.3028955},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {27},
number = {2},
pages = {1043--1053},
abstract = {Many computer science disciplines (e.g., combinatorial optimization, natural language processing, and information retrieval) use standard or established test suites for evaluating algorithms. In visualization, similar approaches have been adopted in some areas (e.g., volume visualization), while user testimonies and empirical studies have been the dominant means of evaluation in most other areas, such as designing colormaps. In this paper, we propose to establish a test suite for evaluating the design of colormaps. With such a suite, the users can observe the effects when different continuous colormaps are applied to planar scalar fields that may exhibit various characteristic features, such as jumps, local extrema, ridge or valley lines, different distributions of scalar values, different gradients, different signal frequencies, different levels of noise, and so on. The suite also includes an expansible collection of real-world data sets including the most popular data for colormap testing in the visualization literature. The test suite has been integrated into a web-based application for creating continuous colormaps (https://ccctool.com/), facilitating close inter-operation between design and evaluation processes. This new facility complements traditional evaluation methods such as user testimonies and empirical studies.},
internal-note = {review: author spelling normalized from "Bottinger" to "Böttinger" for consistency with the CGF 2021 entry in this file -- verify preferred spelling},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kinner, Eric Georg; Lukasczyk, Jonas; Rogers, David; Maciejewski, Ross; Garth, Christoph
Interpolation of Scientific Image Databases Proceedings Article
In: Garth, Christoph; Aurich, Jan C.; Linke, Barbara; Müller, Ralf; Ravani, Bahram; Weber, Gunther H.; Kirsch, Benjamin (Ed.): 2nd International Conference of the DFG International Research Training Group 2057 – Physical Modeling for Virtual Manufacturing (iPMVM 2020), pp. 19:1–19:17, Schloss Dagstuhl -- Leibniz-Zentrum für Informatik, Dagstuhl, Germany, 2021, ISSN: 2190-6807.
@inproceedings{kinner_et_al:OASIcs.iPMVM.2020.19,
  title     = {Interpolation of Scientific Image Databases},
  author    = {Eric Georg Kinner and Jonas Lukasczyk and David Rogers and Ross Maciejewski and Christoph Garth},
  editor    = {Christoph Garth and Jan C. Aurich and Barbara Linke and Ralf Müller and Bahram Ravani and Gunther H. Weber and Benjamin Kirsch},
  url       = {https://drops.dagstuhl.de/opus/volltexte/2021/13768},
  doi       = {10.4230/OASIcs.iPMVM.2020.19},
  issn      = {2190-6807},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {2nd International Conference of the DFG International Research Training Group 2057 – Physical Modeling for Virtual Manufacturing (iPMVM 2020)},
  volume    = {89},
  pages     = {19:1--19:17},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum für Informatik},
  address   = {Dagstuhl, Germany},
  series    = {Open Access Series in Informatics (OASIcs)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Childs, Hank; Ahern, Sean; Ahrens, James; Bauer, Andrew; Bennett, Janine; Bethel, Wes; Bremer, Peer-Timo; Brugger, Eric; Cottam, Joseph; Dorier, Matthieu; Dutta, Soumya; Favre, Jean; Fogal, Thomas; Frey, Steffen; Garth, Christoph; Geveci, Berk; Godoy, William; Hansen, Charles; Harrison, Cyrus; Hentschel, Bernd; Insley, Joseph; Johnson, Chris; Klasky, Scott; Knoll, Aaron; Kress, James; Larsen, Matthew; Lofstead, Jay; Ma, Kwan-Liu; Malakar, Preeti; Meredith, Jeremy; Moreland, Kenneth; Navrátil, Paul; O’Leary, Patrick; Parashar, Manish; Pascucci, Valerio; Patchett, John; Peterka, Tom; Petruzza, Steve; Podhorszki, Norbert; Pugmire, David; Rasquin, Michel; Rizzi, Silvio; Rogers, David; Sane, Sudhanshu; Sauer, Franz; Sisneros, Robert; Shen, Han-Wei; Usher, Will; Vickery, Rhonda; Vishwanath, Venkatram; Wald, Ingo; Wang, Ruonan; Weber, Gunther; Whitlock, Brad; Wolf, Matthew; Yu, Hongfeng; Ziegeler, Sean
A terminology for in situ visualization and analysis systems Journal Article
In: The International Journal of High Performance Computing Applications, vol. 34, no. 6, pp. 676-691, 2020.
@article{doi:10.1177/1094342020935991,
title = {A terminology for in situ visualization and analysis systems},
author = {Hank Childs and Sean Ahern and James Ahrens and Andrew Bauer and Janine Bennett and Wes Bethel and Peer-Timo Bremer and Eric Brugger and Joseph Cottam and Matthieu Dorier and Soumya Dutta and Jean Favre and Thomas Fogal and Steffen Frey and Christoph Garth and Berk Geveci and William Godoy and Charles Hansen and Cyrus Harrison and Bernd Hentschel and Joseph Insley and Chris Johnson and Scott Klasky and Aaron Knoll and James Kress and Matthew Larsen and Jay Lofstead and Kwan-Liu Ma and Preeti Malakar and Jeremy Meredith and Kenneth Moreland and Paul Navrátil and Patrick O’Leary and Manish Parashar and Valerio Pascucci and John Patchett and Tom Peterka and Steve Petruzza and Norbert Podhorszki and David Pugmire and Michel Rasquin and Silvio Rizzi and David Rogers and Sudhanshu Sane and Franz Sauer and Robert Sisneros and Han-Wei Shen and Will Usher and Rhonda Vickery and Venkatram Vishwanath and Ingo Wald and Ruonan Wang and Gunther Weber and Brad Whitlock and Matthew Wolf and Hongfeng Yu and Sean Ziegeler},
url = {https://dsscale.org/wp-content/uploads/2020/10/ISTP.pdf},
doi = {10.1177/1094342020935991},
year = {2020},
date = {2020-08-14},
journal = {The International Journal of High Performance Computing Applications},
volume = {34},
number = {6},
pages = {676--691},
abstract = {The term “in situ processing” has evolved over the last decade to mean both a specific strategy for visualizing and analyzing data and an umbrella term for a processing paradigm. The resulting confusion makes it difficult for visualization and analysis scientists to communicate with each other and with their stakeholders. To address this problem, a group of over 50 experts convened with the goal of standardizing terminology. This paper summarizes their findings and proposes a new terminology for describing in situ systems. An important finding from this group was that in situ systems are best described via multiple, distinct axes: integration type, proximity, access, division of execution, operation controls, and output type. This paper discusses these axes, evaluates existing systems within the axes, and explores how currently used terms relate to the axes.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zeller, Stephanie; Rogers, David
Visualizing Science: How Color Determines What We See Journal Article
In: 2020, (Published in EOS: Science News by AGU).
@article{szellerEOS2020,
  title     = {Visualizing Science: How Color Determines What We See},
  author    = {Stephanie Zeller and David Rogers},
  url       = {https://eos.org/features/visualizing-science-how-color-determines-what-we-see},
  year      = {2020},
  date      = {2020-05-21},
  urldate   = {2020-05-21},
  publisher = {EOS},
  abstract  = {Color plays a major role in the analysis and communication of scientific information. New tools are helping to improve how color can be applied more accurately and effectively to data.},
  note      = {Published in EOS: Science News by AGU},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Abram, Gregory; Adhinarayanan, Vignesh; Feng, Wu-chun; Rogers, David; Ahrens, James; Wilson, Luke
ETH: A Framework for the Design-Space Exploration of Extreme-Scale Scientific Visualization Journal Article
In: 2020.
@article{abrameth,
  title    = {ETH: A Framework for the Design-Space Exploration of Extreme-Scale Scientific Visualization},
  author   = {Gregory Abram and Vignesh Adhinarayanan and Wu-chun Feng and David Rogers and James Ahrens and Luke Wilson},
  url      = {https://dsscale.org/wp-content/uploads/2020/04/ETH-A-Framework-for-the-Design-Space-Exploration.pdf},
  year     = {2020},
  date     = {2020-04-07},
  abstract = {As high-performance computing (HPC) moves towards the exascale era, large-scale scientific simulations are generating enormous datasets. A variety of techniques (e.g., in-situ methods, data sampling, and compression) have been proposed to help visualize these large datasets under various constraints such as storage, power, and energy. However, evaluating these techniques and understanding the various trade-offs (e.g., performance, efficiency, quality) remains a challenging task.
To enable the investigation and optimization across such tradeoffs, we propose a toolkit for the early-stage exploration of visualization and rendering approaches, job layout, and visualization pipelines. Our framework covers a broader parameter space than existing visualization applications such as ParaView and VisIt. It also promotes the study of simulation-visualization coupling strategies through a data-centric approach, rather than requiring the code itself. Furthermore, with experimentation on an extensively instrumented supercomputer, we study more metrics of interest than was previously possible. Overall, our framework will help to answer important what-if scenarios and trade-off questions in early stages of pipeline development, helping scientists to make informed choices about how to best couple a simulation code with visualization at extreme scale.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
To enable the investigation and optimization across such tradeoffs, we propose a toolkit for the early-stage exploration of visualization and rendering approaches, job layout, and visualization pipelines. Our framework covers a broader parameter space than existing visualization applications such as ParaView and VisIt. It also promotes the study of simulation-visualization coupling strategies through a data-centric approach, rather than requiring the code itself. Furthermore, with experimentation on an extensively instrumented supercomputer, we study more metrics of interest than was previously possible. Overall, our framework will help to answer important what-if scenarios and trade-off questions in early stages of pipeline development, helping scientists to make informed choices about how to best couple a simulation code with visualization at extreme scale.
Tsai, Karen; Bujack, Roxana; Geveci, Berk; Ayachit, Utkarsh; Ahrens, James
Approaches for In Situ Computation of Moments in a Data-Parallel Environment Proceedings Article
In: Frey, Steffen; Huang, Jian; Sadlo, Filip (Ed.): Eurographics Symposium on Parallel Graphics and Visualization, The Eurographics Association, 2020, ISSN: 1727-348X.
@inproceedings{10.2312:pgv.20201075,
title = {Approaches for In Situ Computation of Moments in a Data-Parallel Environment},
author = {Karen Tsai and Roxana Bujack and Berk Geveci and Utkarsh Ayachit and James Ahrens},
editor = {Steffen Frey and Jian Huang and Filip Sadlo},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020EGPGV.pdf},
doi = {10.2312/pgv.20201075},
issn = {1727-348X},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Eurographics Symposium on Parallel Graphics and Visualization},
publisher = {The Eurographics Association},
abstract = {Feature-driven in situ data reduction can overcome the I/O bottleneck that large simulations face in modern supercomputer architectures in a semantically meaningful way. In this work, we make use of pattern detection as a black box detector of arbitrary feature templates of interest. In particular, we use moment invariants because they allow pattern detection independent of the specific orientation of a feature. We provide two open source implementations of a rotation invariant pattern detection algorithm for high performance computing (HPC) clusters with a distributed memory environment. The first one is a straightforward integration approach. The second one makes use of the Fourier transform and the Cross-Correlation Theorem. In this paper, we will compare the two approaches with respect to performance and flexibility and showcase results of the in situ integration with real world simulation code.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Sane, Sudhanshu; Bujack, Roxana; Garth, Christoph; Childs, Hank
A Survey of Seed Placement and Streamline Selection Techniques Journal Article
In: Computer Graphics Forum, vol. 39, no. 3, pp. 785-809, 2020.
@article{https://doi.org/10.1111/cgf.14036,
title = {A Survey of Seed Placement and Streamline Selection Techniques},
author = {Sudhanshu Sane and Roxana Bujack and Christoph Garth and Hank Childs},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020Sane.pdf},
doi = {10.1111/cgf.14036},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
journal = {Computer Graphics Forum},
volume = {39},
number = {3},
pages = {785--809},
abstract = {Streamlines are an extensively utilized flow visualization technique for understanding, verifying, and exploring computational fluid dynamics simulations. One of the major challenges associated with the technique is selecting which streamlines to display. Using a large number of streamlines results in dense, cluttered visualizations, often containing redundant information and occluding important regions, whereas using a small number of streamlines could result in missing key features of the flow. Many solutions to select a representative set of streamlines have been proposed by researchers over the past two decades. In this state-of-the-art report, we analyze and classify seed placement and streamline selection (SPSS) techniques used by the scientific flow visualization community. At a high-level, we classify techniques into automatic and manual techniques, and further divide automatic techniques into three strategies: density-based, feature-based, and similarity-based. Our analysis evaluates the identified strategy groups with respect to focus on regions of interest, minimization of redundancy, and overall computational performance. Finally, we consider the application contexts and tasks for which SPSS techniques are currently applied and have potential applications in the future.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bujack, Roxana; Yan, Lin; Hotz, Ingrid; Garth, Christoph; Wang, Bei
State of the Art in Time-Dependent Flow Topology: Interpreting Physical Meaningfulness Through Mathematical Properties Journal Article
In: Computer Graphics Forum, vol. 39, no. 3, pp. 811-835, 2020.
@article{https://doi.org/10.1111/cgf.14037,
title = {State of the Art in Time-Dependent Flow Topology: Interpreting Physical Meaningfulness Through Mathematical Properties},
author = {Roxana Bujack and Lin Yan and Ingrid Hotz and Christoph Garth and Bei Wang},
url = {http://www.informatik.uni-leipzig.de/~bujack/2020Star.pdf},
doi = {10.1111/cgf.14037},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
journal = {Computer Graphics Forum},
volume = {39},
number = {3},
pages = {811--835},
abstract = {We present a state-of-the-art report on time-dependent flow topology. We survey representative papers in visualization and provide a taxonomy of existing approaches that generalize flow topology from time-independent to time-dependent settings. The approaches are classified based upon four categories: tracking of steady topology, reference frame adaption, pathline classification or clustering, and generalization of critical points. Our unique contributions include introducing a set of desirable mathematical properties to interpret physical meaningfulness for time-dependent flow visualization, inferring mathematical properties associated with selective research papers, and utilizing such properties for classification. The five most important properties identified in the existing literature include coincidence with the steady case, induction of a partition within the domain, Lagrangian invariance, objectivity, and Galilean invariance.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Lukasczyk, Jonas; Garth, Christoph; Larsen, Matthew; Engelke, Wito; Hotz, Ingrid; Rogers, David; Ahrens, James; Maciejewski, Ross
Cinema Darkroom: A Deferred Rendering Framework for Large-Scale Datasets Proceedings Article
In: 2020 IEEE 10th Symposium on Large Data Analysis and Visualization (LDAV), pp. 37–41, IEEE 2020.
@inproceedings{lukasczyk2020cinema,
  title        = {Cinema Darkroom: A Deferred Rendering Framework for Large-Scale Datasets},
  author       = {Jonas Lukasczyk and Christoph Garth and Matthew Larsen and Wito Engelke and Ingrid Hotz and David Rogers and James Ahrens and Ross Maciejewski},
  url          = {https://www.computer.org/csdl/proceedings-article/ldav/2020/846800a037/1pZ0U4aglxe},
  doi          = {10.1109/LDAV51489.2020.00011},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {2020 IEEE 10th Symposium on Large Data Analysis and Visualization (LDAV)},
  pages        = {37--41},
  organization = {IEEE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {inproceedings}
}
Turton, Terece; Banesh, Divya; Overmyer, Trinity; Sims, Ben; Rogers, David
Enabling Domain Expertise in Scientific Visualization With CinemaScience Journal Article
In: IEEE Computer Graphics and Applications, vol. 40, no. 1, pp. 90-98, 2020, ISSN: 1558-1756, (LA-UR-19-29339).
@article{Turton:2020:VisViewpoints,
title = {Enabling Domain Expertise in Scientific Visualization With CinemaScience},
author = {Terece Turton and Divya Banesh and Trinity Overmyer and Ben Sims and David Rogers},
url = {https://ieeexplore.ieee.org/document/8951775
https://dsscale.org/wp-content/uploads/2020/01/EnablingDomainExpertiseinScientificVisualizationWithCinemaScience.pdf},
doi = {10.1109/MCG.2019.2954171},
issn = {1558-1756},
year = {2020},
date = {2020-01-01},
journal = {IEEE Computer Graphics and Applications},
volume = {40},
number = {1},
pages = {90--98},
abstract = {Scientific users present unique challenges to visualization researchers. Their high-level tasks require them to apply domain-specific expertise. We introduce a broader audience to the CinemaScience project and demonstrate how CinemaScience enables efficient visualization workflows that can bring in scientist expertise and drive scientific insight.},
note = {LA-UR-19-29339},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Orban, Daniel; Banesh, Divya; Banesh, Cameron; Biwer, Christopher; Biswas, Ayan; Saavedra, Ramon; Sweeney, Christine; Sandberg, Richard; Bolme, C A; Ahrens, James; Rogers, David
Cinema:Bandit: a visualization application for beamline science demonstrated on XFEL shock physics experiments Journal Article
In: Journal of Synchrotron Radiation, vol. 27, no. 1, 2020.
@article{Orban:yn5053,
title = {Cinema:Bandit: a visualization application for beamline science demonstrated on XFEL shock physics experiments},
author = {Daniel Orban and Divya Banesh and Cameron Banesh and Christopher Biwer and Ayan Biswas and Ramon Saavedra and Christine Sweeney and Richard Sandberg and C. A. Bolme and James Ahrens and David Rogers},
url = {https://doi.org/10.1107/S1600577519014322
https://dsscale.org/wp-content/uploads/2019/12/Cinema-Bandit-a-visualization-application-for-beamline-science-demonstrated-on-XFEL-shock-physics-experiments.pdf},
doi = {10.1107/S1600577519014322},
year = {2020},
date = {2020-01-01},
journal = {Journal of Synchrotron Radiation},
volume = {27},
number = {1},
abstract = {A new visualization tool, Cinema:Bandit, and its demonstration with a continuous workflow for analyzing shock physics experiments and visually exploring the data in real time at X-ray light sources is presented. Cinema:Bandit is an open-source, web-based visualization application in which the experimenter may explore an aggregated dataset to inform real-time beamline decisions and enable post hoc data analysis. The tool integrates with experimental workflows that process raw detector data into a simple database format, and it allows visualization of disparate data types, including experimental parameters, line graphs, and images. Use of parallel coordinates accommodates the irregular sampling of experimental parameters and allows for display and filtering of both experimental inputs and measurements. The tool is demonstrated on a dataset of shock-compressed titanium collected at the Matter in Extreme Conditions hutch at the Linac Coherent Light Source.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Pulido, Jesus; Lukic, Zarija; Thorman, Paul; Zheng, Caixia; Ahrens, James; Hamann, Bernd
Data Reduction Using Lossy Compression for Cosmology and Astrophysics Workflows Journal Article
In: Journal of Physics: Conference Series, vol. 1290, pp. 012008, 2019.
@article{Pulido_2019,
title = {Data Reduction Using Lossy Compression for Cosmology and Astrophysics Workflows},
author = {Jesus Pulido and Zarija Lukic and Paul Thorman and Caixia Zheng and James Ahrens and Bernd Hamann},
url = {https://doi.org/10.1088/1742-6596/1290/1/012008},
doi = {10.1088/1742-6596/1290/1/012008},
year = {2019},
date = {2019-10-01},
journal = {Journal of Physics: Conference Series},
volume = {1290},
pages = {012008},
publisher = {IOP Publishing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
De, Soumi; Biwer, Christopher M.; Capano, Collin D.; Nitz, Alexander H.; Brown, Duncan A.
Posterior samples of the parameters of binary black holes from Advanced LIGO, Virgo’s second observing run Journal Article
In: Scientific Data, vol. 6, pp. 81, 2019.
@article{De2019,
title = {Posterior samples of the parameters of binary black holes from Advanced LIGO, Virgo's second observing run},
author = {Soumi De and Christopher M. Biwer and Collin D. Capano and Alexander H. Nitz and Duncan A. Brown},
url = {https://www.nature.com/articles/s41597-019-0086-6},
doi = {10.1038/s41597-019-0086-6},
year = {2019},
date = {2019-06-03},
journal = {Scientific Data},
volume = {6},
pages = {81},
keywords = {},
pubstate = {published},
tppubtype = {article}
}