% paper.bib -- bibliography for the paper.
% (Web-page scraping residue that preceded the first entry has been removed;
%  text outside @entries is ignored by BibTeX, but keeping it invited confusion.)
@article{PATERA1984,
  author   = {Patera, Anthony T.},
  title    = {A Spectral Element Method for Fluid Dynamics: Laminar Flow in a Channel Expansion},
  journal  = {Journal of Computational Physics},
  volume   = {54},
  number   = {3},
  pages    = {468--488},
  year     = {1984},
  issn     = {0021-9991},
  doi      = {10.1016/0021-9991(84)90128-1},
  abstract = {A spectral element method that combines the generality of the finite element method with the accuracy of spectral techniques is proposed for the numerical solution of the incompressible Navier-Stokes equations. In the spectral element discretization, the computational domain is broken into a series of elements, and the velocity in each element is represented as a high-order Lagrangian interpolant through Chebyshev collocation points. The hyperbolic piece of the governing equations is then treated with an explicit collocation scheme, while the pressure and viscous contributions are treated implicitly with a projection operator derived from a variational principle. The implementation of the technique is demonstrated on a one-dimensional inflow-outflow advection-diffusion equation, and the method is then applied to laminar two-dimensional (separated) flow in a channel expansion. Comparisons are made with experiment and previous numerical work.},
}
@misc{nek5000-web-page,
  author       = {Fischer, Paul and Lottes, James W. and Kerkemeier, Stefan G.},
  title        = {{Nek5000} Web Page},
  year         = {2008},
  howpublished = {\url{http://nek5000.mcs.anl.gov}},
}
@misc{pymech,
  author    = {Mohanan, Ashwin Vishnu and Chauvat, Guillaume and Kleine, Vitor Gabriel and Fabbiane, Nicol{\`o} and Canton, Jacopo},
  title     = {Pymech: A {Python} Software Suite for {Nek5000} and {SIMSON}},
  year      = {2022},
  doi       = {10.5281/zenodo.7358961},
  publisher = {Zenodo},
}
@article{fischer2021nekrs,
  title         = {{NekRS}, a {GPU}-Accelerated Spectral Element {Navier--Stokes} Solver},
  author        = {Fischer, Paul and Kerkemeier, Stefan and Min, Misun and Lan, Yu-Hsiang and Phillips, Malachi and Rathnayake, Thilina and Merzari, Elia and Tomboulides, Ananias and Karakus, Ali and Chalmers, Noel and others},
  journal       = {arXiv preprint arXiv:2104.05829},
  year          = {2021},
  eprint        = {2104.05829},
  archiveprefix = {arXiv},
}
@article{jansson2024,
  author   = {Jansson, Niclas and Karp, Martin and Podobas, Artur and Markidis, Stefano and Schlatter, Philipp},
  title    = {{Neko}: A Modern, Portable, and Scalable Framework for High-Fidelity Computational Fluid Dynamics},
  journal  = {Computers \& Fluids},
  volume   = {275},
  pages    = {106243},
  year     = {2024},
  issn     = {0045-7930},
  doi      = {10.1016/j.compfluid.2024.106243},
  keywords = {Computational fluid dynamics, Spectral element method, Turbulence, Direct numerical simulation},
}
@inproceedings{jansson2023,
  author    = {Jansson, Niclas and Karp, Martin and Perez, Adalberto and Mukha, Timofey and Ju, Yi and Liu, Jiahui and P{\'a}ll, Szil{\'a}rd and Laure, Erwin and Weinkauf, Tino and Schumacher, J{\"o}rg and Schlatter, Philipp and Markidis, Stefano},
  title     = {Exploring the Ultimate Regime of Turbulent {Rayleigh--B{\'e}nard} Convection Through Unprecedented Spectral-Element Simulations},
  booktitle = {Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis},
  series    = {SC '23},
  articleno = {5},
  numpages  = {9},
  location  = {Denver, CO, USA},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  year      = {2023},
  isbn      = {9798400701092},
  doi       = {10.1145/3581784.3627039},
  abstract  = {We detail our developments in the high-fidelity spectral-element code Neko that are essential for unprecedented large-scale direct numerical simulations of fully developed turbulence. Major innovations are modular multi-backend design enabling performance portability across a wide range of GPUs and CPUs, a GPU-optimized preconditioner with task overlapping for the pressure-Poisson equation and in-situ data compression. We carry out initial runs of Rayleigh--B{\'e}nard Convection (RBC) at extreme scale on the LUMI and Leonardo supercomputers. We show how Neko is able to strongly scale to 16,384 GPUs and obtain results that are not possible without careful consideration and optimization of the entire simulation workflow. These developments in Neko will help resolving the long-standing question regarding the ultimate regime in RBC.},
}
@article{mpi4py,
  author   = {Dalc{\'\i}n, Lisandro and Paz, Rodrigo and Storti, Mario},
  title    = {{MPI} for {Python}},
  journal  = {Journal of Parallel and Distributed Computing},
  volume   = {65},
  number   = {9},
  pages    = {1108--1115},
  year     = {2005},
  issn     = {0743-7315},
  doi      = {10.1016/j.jpdc.2005.03.010},
  keywords = {Message passing, MPI, High level languages, Parallel Python},
  abstract = {MPI for Python provides bindings of the Message Passing Interface (MPI) standard for the Python programming language and allows any Python program to exploit multiple processors. This package is constructed on top of the MPI-1 specification and defines an object-oriented interface which closely follows MPI-2 C++ bindings. It supports point-to-point (sends, receives) and collective (broadcasts, scatters, gathers) communications of general Python objects. Efficiency has been tested in a Beowulf class cluster and satisfying results were obtained. MPI for Python is open source and available for download on the web (http://www.cimec.org.ar/python).},
}
@article{adios2,
  author    = {Godoy, William F. and Podhorszki, Norbert and Wang, Ruonan and Atkins, Chuck and Eisenhauer, Greg and Gu, Junmin and Davis, Philip and Choi, Jong and Germaschewski, Kai and Huck, Kevin and others},
  title     = {{ADIOS} 2: The Adaptable Input Output System. {A} Framework for High-Performance Data Management},
  journal   = {SoftwareX},
  volume    = {12},
  pages     = {100561},
  year      = {2020},
  publisher = {Elsevier},
}
@article{numpy,
  author    = {Charles R. Harris and K. Jarrod Millman and St{\'{e}}fan J. van der Walt and Ralf Gommers and Pauli Virtanen and David Cournapeau and Eric Wieser and Julian Taylor and Sebastian Berg and Nathaniel J. Smith and Robert Kern and Matti Picus and Stephan Hoyer and Marten H. van Kerkwijk and Matthew Brett and Allan Haldane and Jaime Fern{\'{a}}ndez del R{\'{i}}o and Mark Wiebe and Pearu Peterson and Pierre G{\'{e}}rard-Marchant and Kevin Sheppard and Tyler Reddy and Warren Weckesser and Hameer Abbasi and Christoph Gohlke and Travis E. Oliphant},
  title     = {Array programming with {NumPy}},
  journal   = {Nature},
  year      = {2020},
  month     = sep,
  volume    = {585},
  number    = {7825},
  pages     = {357--362},
  doi       = {10.1038/s41586-020-2649-2},
  url       = {https://doi.org/10.1038/s41586-020-2649-2},
  publisher = {Springer Science and Business Media {LLC}},
}
@incollection{fischer2006,
  author    = {Paul Fischer and Francis Loth and Sang-Wook Lee and David Smith and Henry Tufo and Hisham Bassiouny},
  title     = {Parallel Simulation of High {Reynolds} Number Vascular Flows},
  editor    = {Anil Deane and Akin Ecer and James McDonough and Nobuyuki Satofuka and Gunther Brenner and David R. Emerson and Jacques Periaux and Damien Tromeur-Dervout},
  booktitle = {Parallel Computational Fluid Dynamics 2005},
  publisher = {Elsevier},
  address   = {Amsterdam},
  pages     = {219--226},
  year      = {2006},
  isbn      = {978-0-444-52206-1},
  doi       = {10.1016/B978-044452206-1/50026-4},
  url       = {https://www.sciencedirect.com/science/article/pii/B9780444522061500264},
  abstract  = {The chapter provides an overview of the governing equations, time advancement scheme, and spectral element method. The chapter describes boundary condition treatment for simulating transition in bifurcation geometries. The chapter also presents parallel considerations and performance results, and provides results for transitional flow in an arteriovenous graft model. The simulation of turbulent vascular flows presents significant numerical challenges. Because such flows are weakly turbulent, they lack an inertial subrange that is amenable to subgrid-scale (SGS) modeling required for large-eddy or Reynolds-averaged Navier--Stokes simulations. The only reliable approach at present is to directly resolve all scales of motion. While the Reynolds number is not high, the physical dissipation is small. Weakly turbulent blood flow---such as the one that occurs in post-stenotic regions or subsequent to graft implantation---exhibits a much broader range of scales than does its laminar counterpart, and thus requires an order of magnitude increase in spatial and temporal resolution, making fast iterative solvers and parallel computing necessities.},
}
@article{elkhoury2013,
  author   = {El Khoury, George K. and Schlatter, Philipp and Noorani, Azad and Fischer, Paul F. and Brethouwer, Geert and Johansson, Arne V.},
  title    = {Direct Numerical Simulation of Turbulent Pipe Flow at Moderately High {Reynolds} Numbers},
  journal  = {Flow, Turbulence and Combustion},
  year     = {2013},
  month    = oct,
  volume   = {91},
  number   = {3},
  pages    = {475--495},
  issn     = {1573-1987},
  doi      = {10.1007/s10494-013-9482-8},
  abstract = {Fully resolved direct numerical simulations (DNSs) have been performed with a high-order spectral element method to study the flow of an incompressible viscous fluid in a smooth circular pipe of radius $R$ and axial length $25R$ in the turbulent flow regime at four different friction Reynolds numbers $Re_\tau = 180$, 360, 550 and 1,000. The new set of data is put into perspective with other simulation data sets, obtained in pipe, channel and boundary layer geometry. In particular, differences between different pipe DNS are highlighted. It turns out that the pressure is the variable which differs the most between pipes, channels and boundary layers, leading to significantly different mean and pressure fluctuations, potentially linked to a stronger wake region. In the buffer layer, the variation with Reynolds number of the inner peak of axial velocity fluctuation intensity is similar between channel and boundary layer flows, but lower for the pipe, while the inner peak of the pressure fluctuations show negligible differences between pipe and channel flows but is clearly lower than that for the boundary layer, which is the same behaviour as for the fluctuating wall shear stress. Finally, turbulent kinetic energy budgets are almost indistinguishable between the canonical flows close to the wall (up to $y^+ \approx 100$), while substantial differences are observed in production and dissipation in the outer layer. A clear Reynolds number dependency is documented for the three flow configurations.},
}
@article{mallor2024,
  author   = {Fermin Mallor and Ricardo Vinuesa and Ramis {\"O}rl{\"u} and Philipp Schlatter},
  title    = {High-Fidelity Simulations of the Flow Around a {NACA} 4412 Wing Section at High Angles of Attack},
  journal  = {International Journal of Heat and Fluid Flow},
  volume   = {110},
  pages    = {109590},
  year     = {2024},
  issn     = {0142-727X},
  doi      = {10.1016/j.ijheatfluidflow.2024.109590},
  url      = {https://www.sciencedirect.com/science/article/pii/S0142727X24003151},
  keywords = {Large-eddy simulation, Turbulent boundary layers, Wings, Adverse-pressure gradient, Flow separation},
  abstract = {This study uses high-resolution large-eddy simulations (LES) to investigate the turbulent flow around a NACA 4412 wing profile at multiple Reynolds numbers based on chord length and free-stream velocity ($Re_c = 2\times10^5$, $4\times10^5$ and $10^6$) and angles of attack (AoA = $5^\circ$, $8^\circ$, $11^\circ$ and $14^\circ$). The introduction of adaptive mesh refinement (AMR) and non-conformal meshing into the spectral-element-method code Nek5000 enabled the simulations at higher AoAs exhibiting flow separation by enabling the use of wider domains, allowing to capture the largest turbulent scales associated with flow separation. The results provide a detailed database---including integral quantities, velocity statistics and spectra---which may be used for the evaluation of lower-fidelity turbulence models. Furthermore, closer inspection of specific turbulent-boundary-layer (TBL) profiles allows us to discern between pressure-gradient (PG) and Reynolds-numbers effects on TBLs, showing that $Re$ balances the PG, by reducing the impact of PG on the flow. Lastly, we assess the influence of flow history on TBLs, showing that a consistent flow history over an extended length is needed for TBLs to exhibit comparable profiles and characteristics.},
}
@article{merzari2020,
  author    = {Elia Merzari and Paul Fischer and Misun Min and Stefan Kerkemeier and Aleksandr Obabko and Dillon Shaver and Haomin Yuan and Yiqi Yu and Javier Martinez and Landon Brockmeyer and Lambert Fick and Giacomo Busco and Alper Yildiz and Yassin Hassan},
  title     = {Toward Exascale: Overview of Large Eddy Simulations and Direct Numerical Simulations of Nuclear Reactor Flows with the Spectral Element Method in {Nek5000}},
  journal   = {Nuclear Technology},
  volume    = {206},
  number    = {9},
  pages     = {1308--1324},
  year      = {2020},
  publisher = {Taylor \& Francis},
  doi       = {10.1080/00295450.2020.1748557},
}