-rw-r--r--  __pycache__/hyperparameters.cpython-38.pyc | bin | 341 -> 341 bytes
-rw-r--r--  __pycache__/losses.cpython-38.pyc          | bin | 4610 -> 4599 bytes
-rw-r--r--  hyperparameters.py                         | 6
-rw-r--r--  losses.py                                  | 2
-rw-r--r--  save.jpg                                   | bin | 25059 -> 46187 bytes
5 files changed, 4 insertions, 4 deletions
diff --git a/__pycache__/hyperparameters.cpython-38.pyc b/__pycache__/hyperparameters.cpython-38.pyc
index 40cba873..d0ec2a3e 100644
--- a/__pycache__/hyperparameters.cpython-38.pyc
+++ b/__pycache__/hyperparameters.cpython-38.pyc
Binary files differ
diff --git a/__pycache__/losses.cpython-38.pyc b/__pycache__/losses.cpython-38.pyc
index ff01b5a7..9cd2e604 100644
--- a/__pycache__/losses.cpython-38.pyc
+++ b/__pycache__/losses.cpython-38.pyc
Binary files differ
diff --git a/hyperparameters.py b/hyperparameters.py
index fedd4dd7..75d58118 100644
--- a/hyperparameters.py
+++ b/hyperparameters.py
@@ -9,7 +9,7 @@ Number of epochs. If you experiment with more complex networks you
 might need to increase this. Likewise if you add regularization that
 slows training.
 """
-num_epochs = 150
+num_epochs = 1000

 """
 A critical parameter that can dramatically affect whether training
@@ -20,6 +20,6 @@
 learning_rate = 1e-2

 momentum = 0.01

-alpha = 1e1
+alpha = 1e-3
-beta = 1e2
+beta = 1e-1
diff --git a/losses.py b/losses.py
--- a/losses.py
+++ b/losses.py
@@ -34,7 +34,7 @@ class YourModel(tf.keras.Model):

         #print(self.content_image.shape, self.style_image.shape)

-        self.optimizer = tf.keras.optimizers.Adam(1e-2)
+        self.optimizer = tf.keras.optimizers.Adam()

         self.vgg16 = [
             # Block 1
diff --git a/save.jpg b/save.jpg
Binary files differ
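
For context: alpha and beta here appear to be the content- and style-loss weights of a neural style transfer setup (losses.py builds a VGG16 feature stack over self.content_image and self.style_image), and tf.keras.optimizers.Adam() with no arguments defaults to learning_rate=1e-3, so dropping the explicit 1e-2 also lowers the step size tenfold. The sketch below shows how such weights are typically combined into a total loss; it is an assumption about losses.py, and the names total_loss, content_loss, style_loss, and the feature/Gram arguments are hypothetical, not taken from the file.

import tensorflow as tf
import hyperparameters as hp

def total_loss(content_feats, style_grams, generated_feats, generated_grams):
    # Content term: feature-map distance at the chosen VGG16 layer(s).
    content_loss = tf.reduce_mean(tf.square(generated_feats - content_feats))
    # Style term: Gram-matrix distance summed over the chosen VGG16 layers.
    style_loss = tf.add_n([
        tf.reduce_mean(tf.square(g - s))
        for g, s in zip(generated_grams, style_grams)
    ])
    # alpha/beta weight the two terms; this commit shrinks them from 1e1/1e2
    # to 1e-3/1e-1, which also raises the style-to-content ratio from 10x to 100x.
    return hp.alpha * content_loss + hp.beta * style_loss

# The commit also drops the explicit learning rate: Adam() defaults to 1e-3.
optimizer = tf.keras.optimizers.Adam()  # was tf.keras.optimizers.Adam(1e-2)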