diff --git a/diffphys-code-sol.ipynb b/diffphys-code-sol.ipynb
index 3d66715..4c87093 100644
--- a/diffphys-code-sol.ipynb
+++ b/diffphys-code-sol.ipynb
@@ -1357,7 +1357,7 @@
     "err_source = vx_ref - vx_src \n",
     "err_hybrid = vx_ref - vx_hyb \n",
     "v = np.concatenate([err_source,err_hybrid], axis=1)\n",
-    "axes[3].imshow( v , origin='lower', cmap='magma')\n",
+    "axes[3].imshow( v , origin='lower', cmap='cividis')\n",
     "axes[3].set_title(f\" Errors: Source & Learned\")\n",
     "\n",
     "pylab.tight_layout()\n"
diff --git a/make-pdf.sh b/make-pdf.sh
index 0065c3f..fc171b4 100755
--- a/make-pdf.sh
+++ b/make-pdf.sh
@@ -20,9 +20,11 @@ python3.7 json-cleanup-for-pdf.py
 /Users/thuerey/Library/Python/3.7/bin/jupyter-book build . --builder pdflatex
 
 cd _build/latex
-#mv book.pdf book-xetex.pdf # failed anyway
+#mv book.pdf book-xetex.pdf # not necessary, failed anyway
 
+# this generates book.tex
 rm -f book-in.tex sphinxmessages-in.sty book-in.aux book-in.toc
+# rename book.tex -> book-in.tex (this is the original output!)
 mv book.tex book-in.tex
 mv sphinxmessages.sty sphinxmessages-in.sty
 mv book.aux book-in.aux
@@ -30,9 +32,10 @@ mv book.toc book-in.toc
 #mv sphinxmanual.cls sphinxmanual-in.cls
 
 python3.7 ../../fixup-latex.py
-# generates book-in2.tex
+# reads book-in.tex -> writes book-in2.tex
 
 # remove unicode chars via unix iconv
+# reads book-in2.tex -> writes book.tex
 iconv -c -f utf-8 -t ascii book-in2.tex > book.tex
 
 # finally run pdflatex, now it should work:
diff --git a/overview-equations.md b/overview-equations.md
index 1a76892..044bffe 100644
--- a/overview-equations.md
+++ b/overview-equations.md
@@ -28,7 +28,7 @@ $$ (learn-l2)
 
 We typically optimize, i.e. _train_, with a stochastic gradient descent (SGD)
 optimizer of choice, e.g. Adam {cite}`kingma2014adam`.
-We'll rely on auto-diff to compute the gradient w.r.t. weights, $\partial f / \partial \theta$,
+We'll rely on auto-diff to compute the gradient of a scalar loss $L$ w.r.t. the weights, $\partial L / \partial \theta$.
 We will also assume that $e$ denotes a _scalar_ error function (also called
 cost, or objective function).
 It is crucial for the efficient calculation of gradients that this function is scalar.
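A note on the first hunk: 'cividis', like 'magma', is one of matplotlib's perceptually uniform colormaps, and it is additionally designed to remain readable for viewers with color vision deficiency. Below is a minimal, self-contained sketch of the changed plotting logic, not the notebook's actual cell; the random arrays are placeholders standing in for the notebook's vx_ref, vx_src, and vx_hyb velocity fields.

```python
# Standalone sketch of the modified error-visualization logic.
# Placeholder arrays stand in for the notebook's reference (vx_ref),
# source-simulation (vx_src), and learned/hybrid (vx_hyb) fields.
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
vx_ref = rng.standard_normal((32, 40))
vx_src = vx_ref + 0.10 * rng.standard_normal((32, 40))
vx_hyb = vx_ref + 0.02 * rng.standard_normal((32, 40))

err_source = vx_ref - vx_src
err_hybrid = vx_ref - vx_hyb
# place the two error fields side by side in a single image
v = np.concatenate([err_source, err_hybrid], axis=1)

fig, ax = plt.subplots()
im = ax.imshow(v, origin='lower', cmap='cividis')  # was cmap='magma'
ax.set_title("Errors: Source & Learned")
fig.colorbar(im, ax=ax)
fig.tight_layout()
plt.show()
```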
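On the make-pdf.sh comments: the iconv call's -c flag silently discards any character that cannot be represented in the target encoding. If iconv were unavailable, a rough Python equivalent of that one step (same file names as in the script, error handling omitted) could look like this:

```python
# Approximate stand-in for: iconv -c -f utf-8 -t ascii book-in2.tex > book.tex
# errors='ignore' drops unencodable characters, mirroring iconv's -c flag.
with open('book-in2.tex', encoding='utf-8') as src:
    text = src.read()
with open('book.tex', 'w', encoding='ascii', errors='ignore') as dst:
    dst.write(text)
```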
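On the overview-equations.md wording: reverse-mode auto-diff yields the full gradient $\partial L / \partial \theta$ in a single backward pass precisely because $L$ is scalar, and frameworks refuse to implicitly backpropagate from a non-scalar tensor. A tiny illustration (PyTorch chosen here only as an example framework; theta, x, and y_target are made-up stand-ins, not the book's variables):

```python
# Minimal sketch: auto-diff of a scalar L2 loss w.r.t. the weights theta.
import torch

theta = torch.randn(3, requires_grad=True)   # stand-in for network weights
x = torch.tensor([1.0, 2.0, 3.0])
y_target = torch.tensor([2.0, 4.0, 6.0])

y = theta * x                                # stand-in for f(x; theta)
L = 0.5 * torch.sum((y - y_target) ** 2)     # scalar loss, as in (learn-l2)

L.backward()                                 # computes dL/dtheta
print(theta.grad)

# By contrast, (y - y_target).backward() would raise a RuntimeError:
# gradients can only be implicitly created for scalar outputs.
```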