@conference{811,
  title        = {MIAMI: A Framework for Application Performance Diagnosis},
  booktitle    = {ISPASS-2014},
  year         = {2014},
  month        = mar,
  publisher    = {IEEE},
  organization = {IEEE},
  address      = {Monterey, CA},
  abstract     = {A typical application tuning cycle repeats the following three steps in a loop: performance measurement, analysis of results, and code refactoring. While performance measurement is well covered by existing tools, analysis of results to understand the main sources of inefficiency and to identify opportunities for optimization is generally left to the user. Today's state-of-the-art performance analysis tools use instrumentation or hardware counter sampling to measure the performance of interactions between code and the target architecture during execution. Such measurements are useful for identifying hotspots in applications: places where execution time is spent or where cache misses are incurred. However, an explanatory understanding of tuning opportunities requires a more detailed, mechanistic modeling approach. This paper presents MIAMI (Machine Independent Application Models for performance Insight), a set of tools for automatic performance diagnosis. MIAMI uses application characterization and models of target architectures to reason about an application's performance. MIAMI uses a modeling approach based on first-order principles to identify performance bottlenecks, pinpoint optimization opportunities, and compute bounds on the potential for improvement.},
  isbn         = {978-1-4799-3604-5},
  doi          = {10.1109/ISPASS.2014.6844480},
  author       = {Gabriel Marin and Jack Dongarra and Dan Terpstra}
}