| | | |
|---|---|---|
| author | Logan Bauman <logan_bauman@brown.edu> | 2022-05-07 08:12:20 -0400 |
| committer | Logan Bauman <logan_bauman@brown.edu> | 2022-05-07 08:12:20 -0400 |
| commit | 10f2ece4ab4d8df526c5ff77958b25fe6f32344d (patch) | |
| tree | 8ce1826fa3b64549dc04e0c3cfd2053aa9eb6dc6 /hyperparameters.py | |
| parent | 718f2a66accddc9e1ff619c478e3cdce1b39333a (diff) | |
hi
Diffstat (limited to 'hyperparameters.py')
-rw-r--r-- | hyperparameters.py | 6
1 file changed, 3 insertions, 3 deletions
diff --git a/hyperparameters.py b/hyperparameters.py
index 73f4b497..fedd4dd7 100644
--- a/hyperparameters.py
+++ b/hyperparameters.py
@@ -9,7 +9,7 @@ Number of epochs. If you experiment with more complex networks you
 might need to increase this. Likewise if you add regularization that
 slows training.
 """
-num_epochs = 5000
+num_epochs = 150
 
 """
 A critical parameter that can dramatically affect whether training
@@ -20,6 +20,6 @@ learning_rate = 1e-2
 
 momentum = 0.01
 
-alpha = 1e-2
+alpha = 1e1
 
-beta = 1e-4
+beta = 1e2
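Because these values are plain module-level globals in hyperparameters.py, the commit changes training behavior without touching any training code: whatever script imports the module picks up the new constants. The sketch below illustrates that consumption pattern only; it is not the project's actual training code. The names num_epochs, learning_rate, momentum, alpha, and beta come from the diff above, while the SGD loop and the assumption that alpha and beta weight extra loss terms are illustrative guesses.

```python
# Minimal sketch (assumed, not the repository's training code) of how a
# module of bare hyperparameter globals like hyperparameters.py is
# typically consumed by a training script.
import numpy as np

import hyperparameters as hp  # the file changed in this commit


def sgd_with_momentum(weights, grad_fn):
    """Plain SGD-with-momentum loop driven by the module-level constants."""
    velocity = np.zeros_like(weights)
    for _ in range(hp.num_epochs):          # 150 after this commit, was 5000
        grad = grad_fn(weights)
        velocity = hp.momentum * velocity - hp.learning_rate * grad
        weights = weights + velocity
    return weights


def total_loss(data_loss, reg_term_1, reg_term_2):
    # Assumption: alpha and beta weight additional loss terms. The diff only
    # shows their magnitudes growing (1e-2 -> 1e1 and 1e-4 -> 1e2), not what
    # they scale.
    return data_loss + hp.alpha * reg_term_1 + hp.beta * reg_term_2
```

Read this way, the commit trades a much shorter run (150 epochs instead of 5000) for far heavier weighting of the two extra loss terms.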