@inproceedings{56c5be0bba8147bc9dbe69ad62bd04cf,
title = "Distributed workflows for modeling experimental data",
abstract = "Modeling helps explain the fundamental physics hidden behind experimental data. In the case of material modeling, running one simulation rarely results in output that reproduces the experimental data. Often one or more of the force field parameters are not precisely known and must be optimized for the output to match that of the experiment. Since the simulations require high performance computing (HPC) resources and there are usually many simulations to run, a workflow is very useful to prevent errors and assure that the simulations are identical except for the parameters that need to be varied. The use of HPC implies distributed workflows, but the optimization and steps to compare the simulation results and experimental data are done on a local workstation. We will present results from force field refinement of data collected at the Spallation Neutron Source using Kepler, Pegasus, and BEAM workflows and discuss what we have learned from using these workflows.",
keywords = "experiments, modeling, simulations, workflows",
author = "Lynch, \{Vickie E.\} and Calvo, \{Jose Borreguero\} and Ewa Deelman and \{Da Silva\}, \{Rafael Ferreira\} and Monojoy Goswami and Yawei Hui and Eric Lingerfelt and Vetter, \{Jeffrey S.\}",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE.; 2017 IEEE High Performance Extreme Computing Conference, HPEC 2017 ; Conference date: 12-09-2017 Through 14-09-2017",
year = "2017",
month = oct,
day = "30",
doi = "10.1109/HPEC.2017.8091071",
language = "English",
series = "2017 IEEE High Performance Extreme Computing Conference, HPEC 2017",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2017 IEEE High Performance Extreme Computing Conference, HPEC 2017",
address = "United States",
}