diff --git a/make-pdf.sh b/make-pdf.sh
index 8e0b358..9e1e434 100755
--- a/make-pdf.sh
+++ b/make-pdf.sh
@@ -22,4 +22,7 @@
 ${PYT} json-cleanup-for-pdf.py
 
 # unused fixup-latex.py
 
+# for convenience, archive results in main dir
+#mv book.pdf ../../pbdl-book-pdflatex.pdf
+#tar czvf ../../pbdl-latex-for-arxiv.tar.gz *
diff --git a/references.bib b/references.bib
index 5afce67..c13d128 100644
--- a/references.bib
+++ b/references.bib
@@ -43,6 +43,15 @@
 year={2024}
 }
 
+@article{list2025differentiability,
+  title={Differentiability in unrolled training of neural physics simulators on transient dynamics},
+  author={List, Bjoern and Chen, Li-Wei and Bali, Kartik and Thuerey, Nils},
+  journal={Computer Methods in Applied Mechanics and Engineering},
+  volume={433},
+  pages={117441},
+  year={2025},
+  publisher={Elsevier}
+}
 
 @inproceedings{shehata2025trunc,
@@ -95,20 +104,37 @@
 url={https://joss.theoj.org/papers/10.21105/joss.06171},
 }
 
+@article{kohl2023benchmarking,
+  title={Benchmarking autoregressive conditional diffusion models for turbulent flow simulation},
+  author={Kohl, Georg and Chen, Li-Wei and Thuerey, Nils},
+  journal={arXiv:2309.01745},
+  year={2023}
+}
+
+@article{brahmachary2024unsteady,
+  title={Unsteady cylinder wakes from arbitrary bodies with differentiable physics-assisted neural network},
+  author={Brahmachary, Shuvayan and Thuerey, Nils},
+  journal={Physical Review E},
+  volume={109},
+  number={5},
+  year={2024},
+  publisher={APS}
+}
+
 @article{holzschuh2024fm,
-title={Solving Inverse Physics Problems with Score Matching},
-author={Benjamin Holzschuh and Nils Thuerey},
-journal={Advances in Neural Information Processing Systems (NeurIPS)},
-volume={36},
-year={2023}
+  title={Solving Inverse Physics Problems with Score Matching},
+  author={Benjamin Holzschuh and Nils Thuerey},
+  journal={Advances in Neural Information Processing Systems (NeurIPS)},
+  volume={36},
+  year={2023}
 }
 
 @article{holzschuh2023smdp,
-title={Solving Inverse Physics Problems with Score Matching},
-author={Benjamin Holzschuh and Simona Vegetti and Nils Thuerey},
-journal={Advances in Neural Information Processing Systems (NeurIPS)},
-volume={36},
-year={2023}
+  title={Solving Inverse Physics Problems with Score Matching},
+  author={Benjamin Holzschuh and Simona Vegetti and Nils Thuerey},
+  journal={Advances in Neural Information Processing Systems (NeurIPS)},
+  volume={36},
+  year={2023}
 }
 
 @inproceedings{franz2023nglobt,
@@ -120,11 +146,11 @@
 }
 
 @inproceedings{kohl2023volSim,
-title={Learning Similarity Metrics for Volumetric Simulations with Multiscale CNNs},
-author={Kohl, Georg and Chen, Li-Wei and Thuerey, Nils},
-booktitle={AAAI Conference on Artificial Intelligence},
-year={2022},
-url={https://github.com/tum-pbs/VOLSIM},
+  title={Learning Similarity Metrics for Volumetric Simulations with Multiscale CNNs},
+  author={Kohl, Georg and Chen, Li-Wei and Thuerey, Nils},
+  booktitle={AAAI Conference on Artificial Intelligence},
+  year={2022},
+  url={https://github.com/tum-pbs/VOLSIM},
 }
 
 @inproceedings{list2022piso,
diff --git a/resources/pbdl-arch-figures.key b/resources/pbdl-arch-figures.key
index 45f663e..ca33807 100644
Binary files a/resources/pbdl-arch-figures.key and b/resources/pbdl-arch-figures.key differ
diff --git a/resources/pbdl-figures.key b/resources/pbdl-figures.key
index d31fda2..6d49d9d 100644
Binary files a/resources/pbdl-figures.key and b/resources/pbdl-figures.key differ
diff --git a/supervised-arch.md b/supervised-arch.md
index 7cbc7f9..51b9dfe 100644
--- a/supervised-arch.md
+++ b/supervised-arch.md
@@ -148,11 +148,12 @@ Spatial convolutions (left, kernel in orange) and frequency processing in FNOs (
 ```
 
 Unfortunately, they're not well suited for higher dimensional problems: Moving from two to three dimensions increases the size of the frequencies to be handled to $M^3$. For the dense layer, this means $M^6$ parameters, a cubic increase. For convolutions, there's no huge difference in 2D:
- a regular convolution with kernel size $K$ requires $K^2$ weights in 2D, and induces another $O(K^2)$ scaling for processing features, in total $O(K^4)$.
-However, in 3D regular convolutions scale much better: in 3D only the kernel size increases to $K^3$, giving an overall complexity of $O(K^5)$ in 3D.
-Thus, the exponent is 5 instead of 6.
+ a regular convolution with kernel size $K$ requires $K^2$ weights in 2D, and induces another $O(K^2)$ scaling for processing features, in total $O(K^4 N^2)$ for a domain of size $N^2$; in 3D, the kernel grows to $K^3$ weights, giving $O(K^5 N^3)$ overall.
+The frequency coverage $M$ of FNOs needs to scale with the size of the spatial domain, hence typically $M>K$ and $M^6 \gg K^5$.
+Thus, as $K$ is typically much smaller than $N$ and $M$, and only enters with an exponent of 5, CNNs will usually scale much better than FNOs with their 6th-power scaling.
+FNOs would require intractable amounts of parameters to capture finer features, and are thus not recommended for 3D (or higher dimensional) problems. CNN-based architectures require far fewer weights, and in conjunction with hierarchies can still handle global dependencies efficiently.
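+
+As a quick sanity check of these counts, the following minimal sketch compares the two estimates for hypothetical sizes (chosen only for illustration; the variable names are not from the text above):
+
+```python
+# Minimal sketch of the parameter-count argument; all sizes are
+# hypothetical and chosen only for illustration.
+M = 32  # retained frequencies per dimension, scales with the domain size
+K = 5   # convolution kernel size
+
+# Dense layer over all M^3 frequency modes in 3D: (M^3)^2 = M^6 weights.
+fno_weights = (M**3) ** 2
+
+# 3D convolution: K^3 kernel weights, times the O(K^2) factor for feature
+# processing used in the accounting above, i.e. O(K^5) overall.
+cnn_weights = K**3 * K**2
+
+print(f"frequency-space dense layer: {fno_weights:.1e} weights")  # ~1.1e+09
+print(f"3D convolution layer:        {cnn_weights:.1e} weights")  # ~3.1e+03
+```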