From 590a93024673194bd434831e50b349f2753cdb42 Mon Sep 17 00:00:00 2001
From: NT
Date: Fri, 9 Sep 2022 15:04:12 +0200
Subject: [PATCH] cleanup of SoL code

---
 diffphys-code-sol.ipynb | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/diffphys-code-sol.ipynb b/diffphys-code-sol.ipynb
index 3d738b2..c2d5061 100644
--- a/diffphys-code-sol.ipynb
+++ b/diffphys-code-sol.ipynb
@@ -784,7 +784,7 @@
    "source": [
     "## Training\n",
     "\n",
-    "For the training, we use a standard Adam optimizer, and run 15 epochs by default. This should be increased for the larger network or to obtain more accurate results. For longer training runs, it would also be beneficial to decrease the learning rate over the course of the epochs, but for simplicity, we'll keep `LR` constant here.\n",
+    "For the training, we use a standard Adam optimizer, and run 5 epochs by default. This is a fairly low number to keep the runtime low. If you have the resources, 10 or 15 epochs will yield more accurate results. For even longer training runs and larger networks, it would also be beneficial to decrease the learning rate over the course of the epochs, but for simplicity, we'll keep `LR` constant here.\n",
     "\n",
     "Optionally, this is also the right point to load a network state to resume training."
    ]
@@ -798,7 +798,7 @@
    "outputs": [],
    "source": [
     "LR = 1e-4\n",
-    "EPOCHS = 15\n",
+    "EPOCHS = 5\n",
     "\n",
     "opt = tf.keras.optimizers.Adam(learning_rate=LR) \n",
     "\n",
@@ -855,7 +855,6 @@
    ],
    "source": [
     "steps = 0\n",
-    "EPOCHS=5 # NT_DEBUG\n",
     "for j in range(EPOCHS): # training\n",
     "  dataset.newEpoch(exclude_tail=msteps)\n",
     "  if j
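
Note on the learning-rate remark in the first hunk: the patched markdown cell keeps `LR` constant but suggests decaying it for longer runs. Below is a minimal sketch of how such a decay could be wired into the notebook's Keras optimizer; `steps_per_epoch` and the decay settings are illustrative assumptions, not values from diffphys-code-sol.ipynb.

    import tensorflow as tf

    LR = 1e-4               # initial learning rate, as in the notebook
    EPOCHS = 15             # a longer run where a decaying LR starts to pay off
    steps_per_epoch = 100   # hypothetical number of mini-batches per epoch

    # Exponential decay: the LR is multiplied by decay_rate every decay_steps optimizer steps.
    lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate=LR,
        decay_steps=steps_per_epoch * 5,  # decay every 5 epochs
        decay_rate=0.5,                   # halve the LR each time
        staircase=True,
    )

    # A schedule can be passed wherever a constant learning rate is accepted,
    # so this is a drop-in replacement for the constant-LR Adam optimizer above.
    opt = tf.keras.optimizers.Adam(learning_rate=lr_schedule)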