fixed several typos
This commit is contained in:
parent be1dba99e4
commit a3de575c19
intro.md (2 changes)
@@ -15,7 +15,7 @@ We are living in an era of rapid transformation. These methods have the potentia
```{note}
_What's new in v0.3?_
-This latest edition takes things even further with a major new chapter on generative modeling, covering cutting-edge techniques like denoising, flow-matching, autoregressive learning, physics-integrated constraints, and diffusion-based graph networks. We've also introduced a dedicated section on neural architectures specifically designed for physics simulations. All code examples have been updated to leverage the latest frameworks.
+This latest edition adds a major new chapter on generative modeling, covering powerful techniques like denoising, flow-matching, autoregressive learning, physics-integrated constraints, and diffusion-based graph networks. We've also introduced a dedicated section on neural architectures specifically designed for physics simulations. All code examples have been updated to leverage the latest frameworks.
```

---
@@ -62,7 +62,7 @@ In several instances we'll make use of the fundamental theorem of calculus, repe
$$f(x+\Delta) = f(x) + \int_0^1 \text{d}s ~ f'(x+s \Delta) \Delta \ . $$

In addition, we'll make use of Lipschitz-continuity with constant $\mathcal L$:
-$|f(x+\Delta) + f(x)|\le \mathcal L \Delta$, and the well-known Cauchy-Schwartz inequality:
+$|f(x+\Delta) - f(x)|\le \mathcal L \Delta$, and the well-known Cauchy-Schwartz inequality:
$ u^T v \le |u| \cdot |v| $.

## Newton's method
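As a side note on the hunk above: the quoted identities are easy to sanity-check numerically. The short sketch below does so for the integral form of the fundamental theorem of calculus, the Lipschitz bound, and Cauchy-Schwarz; the test function $f(x)=\sin(x)$, the step size, and the random vectors are purely illustrative assumptions, not values from the text.

```python
import numpy as np

# Numerical sanity check of the identities quoted above (illustrative values only).
f, fprime = np.sin, np.cos          # assumed test function and its derivative
x, delta = 0.3, 0.5                 # assumed evaluation point and step

# f(x + Delta) = f(x) + \int_0^1 f'(x + s*Delta) * Delta ds,
# with the integral approximated by an average over uniformly spaced s in [0, 1].
s = np.linspace(0.0, 1.0, 10_000)
lhs = f(x + delta)
rhs = f(x) + np.mean(fprime(x + s * delta) * delta)
print(lhs, rhs)                     # both approximately 0.7174

# Lipschitz bound with L = 1 for sin: |f(x + Delta) - f(x)| <= L * Delta
print(abs(f(x + delta) - f(x)) <= 1.0 * delta)            # True

# Cauchy-Schwarz: u^T v <= |u| * |v| for arbitrary vectors u, v.
rng = np.random.default_rng(0)
u, v = rng.standard_normal(3), rng.standard_normal(3)
print(u @ v <= np.linalg.norm(u) * np.linalg.norm(v))     # True
```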
@@ -31,7 +31,7 @@
"Given a data point $x_0$, we can sample the noisy latent state $x_t$ from the forward Markov chain via\n",
"\n",
"$$\n",
-" q(x_t|x_0) = \\mathcal{N}(x_t, \\sqrt{\\overline{\\alpha}_t}x_0, (1-\\overline{\\alpha}_t)I)) ,\n",
+" q(x_t|x_0) = \\mathcal{N}(\\sqrt{\\overline{\\alpha}_t}x_0, (1-\\overline{\\alpha}_t)I)) ,\n",
"$$\n",
"\n",
"with the inverted weights $\\alpha_t = 1 - \\beta_t$ and alphas accumulated for time $t$ denoted by\n",
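For readers following the notebook change above: sampling $x_t$ directly from $q(x_t|x_0)$ is a one-liner once the accumulated $\overline{\alpha}_t$ are precomputed. Below is a minimal NumPy sketch; the linear $\beta$ schedule, the number of steps, and the function name are assumptions made for illustration, not values taken from the notebook.

```python
import numpy as np

# Minimal sketch: draw x_t ~ q(x_t | x_0) = N(sqrt(abar_t) * x_0, (1 - abar_t) * I).
# The linear beta schedule and T = 1000 are assumed values, not taken from the notebook.
T = 1000
betas = np.linspace(1e-4, 0.02, T)    # noise schedule beta_t
alphas = 1.0 - betas                  # "inverted weights" alpha_t = 1 - beta_t
alpha_bars = np.cumprod(alphas)       # accumulated alphas: abar_t = product of alpha_s for s <= t

def sample_xt(x0, t, rng):
    """Reparameterized sample: x_t = sqrt(abar_t) * x_0 + sqrt(1 - abar_t) * eps, eps ~ N(0, I)."""
    eps = rng.standard_normal(x0.shape)
    return np.sqrt(alpha_bars[t]) * x0 + np.sqrt(1.0 - alpha_bars[t]) * eps

rng = np.random.default_rng(0)
x0 = rng.standard_normal(64)          # a toy "data point"
xt = sample_xt(x0, t=500, rng=rng)    # noisy latent state halfway through the chain
```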
@@ -13,6 +13,14 @@
@STRING{NeurIPS = "Advances in Neural Information Processing Systems"}


+@article{braun2025msbg,
+  title = {{Adaptive Phase-Field-FLIP for Very Large Scale Two-Phase Fluid Simulation}},
+  author = {Braun, Bernhard and Bender, Jan and Thuerey, Nils},
+  journal = {{ACM} Transactions on Graphics},
+  volume = {44 (3)},
+  year = {2025},
+  publisher = {ACM},
+}

@inproceedings{lino2025dgn,
title={Learning Distributions of Complex Fluid Simulations with Diffusion Graph Networks},