-rw-r--r-- | hyperparameters.py | 4 |
-rw-r--r-- | losses.py          | 2 |
2 files changed, 3 insertions, 3 deletions
diff --git a/hyperparameters.py b/hyperparameters.py
index 80141fcf..6c82a745 100644
--- a/hyperparameters.py
+++ b/hyperparameters.py
@@ -9,14 +9,14 @@
 Number of epochs. If you experiment with more complex networks you
 might need to increase this. Likewise if you add regularization that
 slows training.
 """
-num_epochs = 20000
+num_epochs = 7000
 
 """
 A critical parameter that can dramatically affect whether training
 succeeds or fails. The value for this depends significantly on which
 optimizer is used. Refer to the default learning rate parameter
 """
-learning_rate = 1e-2
+learning_rate = 2e-3
 
 momentum = 0.01
@@ -34,7 +34,7 @@ class YourModel(tf.keras.Model):
 
         #print(self.content_image.shape, self.style_image.shape)
 
-        self.optimizer = tf.keras.optimizers.Adam()
+        self.optimizer = tf.keras.optimizers.Adam(hp.learning_rate)
 
         self.vgg16 = [
             # Block 1
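For reference, a minimal sketch of how the changed value is picked up, assuming hyperparameters.py is imported under the alias hp (as the diff's use of hp.learning_rate suggests). Adam's first positional argument is learning_rate, so passing hp.learning_rate replaces the Keras default of 1e-3 with the tuned 2e-3.

import tensorflow as tf
import hyperparameters as hp  # assumed import alias, matching hp.learning_rate in the diff

# Equivalent to tf.keras.optimizers.Adam(learning_rate=hp.learning_rate);
# before this change, Adam() silently fell back to its default rate of 1e-3.
optimizer = tf.keras.optimizers.Adam(hp.learning_rate)

print(float(optimizer.learning_rate))  # 0.002 with the new hyperparameter

With the optimizer now reading the shared hyperparameter, tuning learning_rate in hyperparameters.py takes effect everywhere it is used, which is the point of the Adam() change above.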