From 370ee58d4baa3b468ba055b6773a9ad0a007f2ca Mon Sep 17 00:00:00 2001
From: N_T
Date: Tue, 14 Nov 2023 21:18:45 +0100
Subject: [PATCH] fixed typo, and script update

---
 diffphys-code-ns.ipynb |  4 ++--
 make-pdf.sh            | 11 ++++++++---
 references.bib         |  9 +++++++++
 3 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/diffphys-code-ns.ipynb b/diffphys-code-ns.ipynb
index b21557c..a20d127 100644
--- a/diffphys-code-ns.ipynb
+++ b/diffphys-code-ns.ipynb
@@ -626,7 +626,7 @@
     "This example illustrated how the differentiable physics approach can easily be extended towards significantly more \n",
     "complex PDEs. Above, we've optimized for a mini-batch of 20 steps of a full Navier-Stokes solver.\n",
     "\n",
-    "This is a powerful basis to bring NNs into the picture. As you might have noticed, our degrees of freedom were still a regular grid, and we've jointly solved a single inverse problem. There were three cases to solve as a mini-batch, of course, but nonetheless the setup still represents a direct optimization. Thus, in line with the PINN example of {doc}`physicalloss-code` we've not really dealt with a _machine learning_ task here. However, DP training allows for a range of flexible compinations with NNs that will be the topic of the next chapters.\n"
+    "This is a powerful basis to bring NNs into the picture. As you might have noticed, our degrees of freedom were still a regular grid, and we've jointly solved a single inverse problem. There were three cases to solve as a mini-batch, of course, but nonetheless the setup still represents a direct optimization. Thus, in line with the PINN example of {doc}`physicalloss-code` we've not really dealt with a _machine learning_ task here. However, DP training allows for a range of flexible combinations with NNs that will be the topic of the next chapters.\n"
    ]
   },
   {
@@ -671,4 +671,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 1
-}
\ No newline at end of file
+}
diff --git a/make-pdf.sh b/make-pdf.sh
index a788ca6..3601d04 100755
--- a/make-pdf.sh
+++ b/make-pdf.sh
@@ -1,6 +1,7 @@
 # source this file with "." in a shell
 
 # note this script assumes the following paths/versions: python3.7 , /Users/thuerey/Library/Python/3.7/bin/jupyter-book
+# updated for nMBA !
 
 # do clean git checkout for changes from json-cleanup-for-pdf.py via:
 # git checkout diffphys-code-burgers.ipynb diffphys-code-ns.ipynb diffphys-code-sol.ipynb physicalloss-code.ipynb bayesian-code.ipynb supervised-airfoils.ipynb reinflearn-code.ipynb physgrad-code.ipynb physgrad-comparison.ipynb physgrad-hig-code.ipynb
@@ -9,13 +10,17 @@
 echo
 echo WARNING - still requires one manual quit of first pdf/latex pass, use shift-x to quit
 echo
 
+PYT=python3.7
+PYT=python3
+
 # warning - modifies notebooks!
-python3.7 json-cleanup-for-pdf.py
+${PYT} json-cleanup-for-pdf.py
 
 # clean / remove _build dir ?
 
 # GEN!
-/Users/thuerey/Library/Python/3.7/bin/jupyter-book build . --builder pdflatex
+#/Users/thuerey/Library/Python/3.7/bin/jupyter-book build . --builder pdflatex
+/Users/thuerey/Library/Python/3.9/bin/jupyter-book build . --builder pdflatex
 
 cd _build/latex
 #mv book.pdf book-xetex.pdf # not necessary, failed anyway
@@ -29,7 +34,7 @@
 mv book.aux book-in.aux
 mv book.toc book-in.toc
 #mv sphinxmanual.cls sphinxmanual-in.cls
-python3.7 ../../fixup-latex.py
+${PYT} ../../fixup-latex.py
 # reads book-in.tex -> writes book-in2.tex
 
 # remove unicode chars via unix iconv
diff --git a/references.bib b/references.bib
index 081088c..48324ff 100644
--- a/references.bib
+++ b/references.bib
@@ -13,6 +13,15 @@
 @STRING{NeurIPS = "Advances in Neural Information Processing Systems"}
+
+@article{prantl2022guaranteed,
+  title={Guaranteed conservation of momentum for learning particle-based fluid dynamics},
+  author={Prantl, Lukas and Ummenhofer, Benjamin and Koltun, Vladlen and Thuerey, Nils},
+  journal={Advances in Neural Information Processing Systems},
+  volume={35},
+  year={2022}
+}
+
 @inproceedings{schnell2022hig,
   title={Half-Inverse Gradients for Physical Deep Learning},
   author={Schnell, Patrick and Holl, Philipp and Thuerey, Nils},