scripts for pdf

parent 665637bc04
commit dd5abbc430
@@ -36,7 +36,7 @@ reSkip.append( re.compile(r"set....font{Free") ) ; reSCnt.append( 7 )
 reSkip.append( re.compile(r"ucharclasses") ) ; reSCnt.append( 1 )
 reSkip.append( re.compile(r"unicode-math") ) ; reSCnt.append( 1 )
 
-#reSkip.append( re.compile(r"") )
+
 #reSkip.append( re.compile(r"") )
 #reSkip.append( re.compile(r"") )
 #reSkip.append( re.compile(r"") )
@@ -85,6 +85,8 @@ def parseF(inf,outf,reSkip,reSCnt):
 
 parseF(inf,outf,reSkip,reSCnt)
 
+#---
+
 inf = "sphinxmessages-in.sty"
 outf = "sphinxmessages.sty"
 
@@ -93,3 +95,19 @@ reSkip.append( re.compile(r"addto.captionsenglish") ) ; reSCnt.append( 1 )
 
 parseF(inf,outf,reSkip,reSCnt)
 
+#---
+
+inf = "book-in.aux"
+outf = "book.aux"
+
+reSkip = [] ; reSCnt = []
+reSkip.append( re.compile(r"selectlanguage...english") ) ; reSCnt.append( 1 )
+
+parseF(inf,outf,reSkip,reSCnt)
+
+#---
+
+# same, selectlanguage
+inf = "book-in.toc"
+outf = "book.toc"
+parseF(inf,outf,reSkip,reSCnt)
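For context, not part of the commit: parseF is defined earlier in the patched script; only its signature appears in the hunk header above. Judging from the call sites, each reSkip regex comes paired with a reSCnt count of lines to drop once the regex matches. A minimal sketch under that assumption (not the script's actual implementation):

import re

def parseF(inf, outf, reSkip, reSCnt):
    # assumed behavior: copy inf to outf, dropping reSCnt[i] consecutive lines
    # (the matching line plus the ones after it) whenever reSkip[i] matches
    skip = 0
    with open(inf) as fin, open(outf, "w") as fout:
        for line in fin:
            if skip > 0:
                skip -= 1
                continue
            hit = False
            for pat, cnt in zip(reSkip, reSCnt):
                if pat.search(line) is not None:
                    hit = True
                    skip = cnt - 1   # drop this line and cnt-1 following lines
                    break
            if not hit:
                fout.write(line)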
@@ -1,51 +1,85 @@
-import json, re
+import json, re, os
 
-fn="diffphys-code-burgers.ipynb"
-fnOut="diffphys-code-burgers-r.ipynb"
+fileList = [
+    "diffphys-code-burgers.ipynb", "diffphys-code-sol.ipynb", "physicalloss-code.ipynb", # TF
+    "bayesian-code.ipynb", "supervised-airfoils.ipynb" # pytorch
+]
 
-with open(fn) as file:
-    d = json.load(file)
+for fnOut in fileList:
+    fn = fnOut[:-5] + "bak"
+    print("renaming "+fnOut+ " to "+fn )
+    if os.path.isfile(fnOut):
+        os.rename(fnOut, fn)
+    if not os.path.isfile(fn):
+        print("Error: "+fn+" missing!")
+        exit(1)
 
-print(d.keys())
-#print(d["cells"][0].keys())
+    #continue # exit(1)
 
-re1 = re.compile(r"WARNING:tensorflow:")
+    #fn="diffphys-code-burgers.ipynb"
+    #fnOut="diffphys-code-burgers-r.ipynb"
 
-t="cells"
-for i in range(len(d[t])):
-#for i in range(len(d[t])):
-    #print(d[t][0]["cell_type"])
-    #print(d[t][i]["cell_type"])
+    with open(fn) as file:
+        d = json.load(file)
 
-    # remove images after code
+    #print(d.keys()) #print(d["cells"][0].keys())
 
-    if d[t][i]["cell_type"]=="code":
-        #print(d[t][i].keys())
-        #d[t][i]["outputs"] = ""
-        #print(d[t][i]["outputs"])
+    re1 = re.compile(r"WARNING:tensorflow:")
+    re2 = re.compile(r"UserWarning:")
 
-        #print(len( d[t][i]["outputs"] ))
-        for j in range(len( d[t][i]["outputs"] )):
-            #print(type( d[t][i]["outputs"][j] ))
-            #print( d[t][i]["outputs"][j].keys() )
+    t="cells"
+    okay = 0
+    deletes = 0
+    for i in range(len(d[t])):
+    #for i in range(len(d[t])):
+        #print(d[t][0]["cell_type"])
+        #print(d[t][i]["cell_type"])
 
-            # images
-            if d[t][i]["outputs"][j]["output_type"]=="stream":
-                print( len( d[t][i]["outputs"][j]["text"] ) )
+        # remove images after code
 
-                dell = []
-                for k in range( len( d[t][i]["outputs"][j]["text"] ) ):
-                    num = re1.search( d[t][i]["outputs"][j]["text"][k] )
-                    if num is not None:
-                        dell.append(d[t][i]["outputs"][j]["text"][k])
-                        print( format(num) +" " + d[t][i]["outputs"][j]["text"][k] ) # len( d[t][i]["outputs"][j]["text"][k] ) )
-                for dl in dell:
-                    d[t][i]["outputs"][j]["text"].remove(dl)
+        if d[t][i]["cell_type"]=="code":
+            #print(d[t][i].keys())
+            #d[t][i]["outputs"] = ""
+            #print(d[t][i]["outputs"])
 
-                print( format( len( d[t][i]["outputs"][j]["text"] )) + " A")
+            #print(len( d[t][i]["outputs"] ))
+            for j in range(len( d[t][i]["outputs"] )):
+                #print(type( d[t][i]["outputs"][j] ))
+                #print( d[t][i]["outputs"][j].keys() )
 
-#print(d["cells"])
+                # images
+                if d[t][i]["outputs"][j]["output_type"]=="stream":
+                    print( len( d[t][i]["outputs"][j]["text"] ) )
 
-with open(fnOut,'w') as fileOut:
-    json.dump(d,fileOut, indent=1, sort_keys=True)
+                    dell = [] # collect entries to delete
+                    for k in range( len( d[t][i]["outputs"][j]["text"] ) ):
+                        nums = []
+                        nums.append( re1.search( d[t][i]["outputs"][j]["text"][k] ) )
+                        nums.append( re2.search( d[t][i]["outputs"][j]["text"][k] ) )
+                        if (nums[0] is None) and (nums[1] is None):
+                            okay = okay+1
+                        else: # delete line "dell"
+                            deletes = deletes+1
+                            dell.append(d[t][i]["outputs"][j]["text"][k])
+                            print( format(nums) +" " + d[t][i]["outputs"][j]["text"][k] ) # len( d[t][i]["outputs"][j]["text"][k] ) )
+
+                    for dl in dell:
+                        d[t][i]["outputs"][j]["text"].remove(dl)
+
+                    print( format( len( d[t][i]["outputs"][j]["text"] )) + " A")
+
+    #print(d["cells"])
+
+    if deletes==0:
+        print("Warning: Nothing found in "+fn+"!")
+        if not os.path.isfile(fnOut):
+            os.rename(fn, fnOut)
+        else:
+            print("Error, both files exist!?")
+            exit(1)
+
+    else:
+        print(" ... writing "+fnOut )
+        with open(fnOut,'w') as fileOut:
+            json.dump(d,fileOut, indent=1, sort_keys=True)
 
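Side note on the loop above: the backup name is built by slicing the "ipynb" suffix off and appending "bak", so each notebook is moved aside before cleaning and the filtered JSON is written back under the original name (or, if nothing was filtered, the backup is simply renamed back). A quick check of the slicing:

fnOut = "diffphys-code-burgers.ipynb"
fn = fnOut[:-5] + "bak"    # "ipynb" stripped, the trailing dot stays
print(fn)                  # diffphys-code-burgers.bak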
make-pdf.sh
@@ -1,16 +1,18 @@
 # source this file with "." in a shell
 
+# do clean git checkout?
+
 #DIR=/Users/thuerey/Dropbox/mbaDevelSelected/pbdl-book/
-DIR=/Users/thuerey/Dropbox/mbaDevelSelected/pbdl-book-cleanCheckout/
-cd ${DIR}
+#DIR=/Users/thuerey/Dropbox/mbaDevelSelected/pbdl-book-cleanCheckout/
+#cd ${DIR}
 
 
 # warning - modifies notebooks!
 python3.7 json-cleanup-for-pdf.py
-exit
+#TEMP!!!! python3.7 ../pbdl-book/json-cleanup-for-pdf.py
 
 
-
+# clean / remove _build dir ?
 
 # GEN!
 /Users/thuerey/Library/Python/3.7/bin/jupyter-book build . --builder pdflatex
@@ -18,16 +20,23 @@ exit
 
 cd _build/latex
 
-rm -f book-in.tex sphinxmessages-in.sty
+rm -f book-in.tex sphinxmessages-in.sty book-in.aux book-in.toc
 mv book.tex book-in.tex
 mv sphinxmessages.sty sphinxmessages-in.sty
+mv book.aux book-in.aux
+mv book.toc book-in.toc
 
+#TEMP!!!! python3.7 ../../../pbdl-book/fixup-latex.py
 python3.7 ../../fixup-latex.py
 # generates book-in2.tex
 
 # remove unicode chars
 iconv -c -f utf-8 -t ascii book-in2.tex > book.tex
 
+# run pdflatex?
+pdflatex -recorder book
+# pdflatex -recorder book
+
 exit
 
 