author     David Doan <daviddoan@davids-mbp-3.devices.brown.edu>   2022-05-04 16:40:39 -0400
committer  David Doan <daviddoan@davids-mbp-3.devices.brown.edu>   2022-05-04 16:40:39 -0400
commit     c70019f1a6e03d4325881c5beeedb653a752c975 (patch)
tree       192160d7cfc4a65f8bc27855a908220750929a41 /losses.py
parent     6d4c7a0608a91e3a94d1b174aa9094616c778cb6 (diff)
added a dataset, tried testing (unsuccessfully)
Diffstat (limited to 'losses.py')
-rw-r--r--  losses.py  16
1 files changed, 8 insertions, 8 deletions
@@ -1,6 +1,6 @@
 import tensorflow as tf
 from tensorflow.keras.layers import \
-    Conv2D, MaxPool2D, Dropout, Flatten, Dense, AveragePool2D
+    Conv2D, MaxPool2D, Dropout, Flatten, Dense, AveragePooling2D
 import numpy as np
@@ -28,13 +28,13 @@ class YourModel(tf.keras.Model):
                    activation="relu", name="block1_conv1"),
             Conv2D(64, 3, 1, padding="same",
                    activation="relu", name="block1_conv2"),
-            AveragePool2D(2, name="block1_pool"),
+            AveragePooling2D(2, name="block1_pool"),
             # Block 2
             Conv2D(128, 3, 1, padding="same",
                    activation="relu", name="block2_conv1"),
             Conv2D(128, 3, 1, padding="same",
                    activation="relu", name="block2_conv2"),
-            AveragePool2D(2, name="block2_pool"),
+            AveragePooling2D(2, name="block2_pool"),
             # Block 3
             Conv2D(256, 3, 1, padding="same",
                    activation="relu", name="block3_conv1"),
@@ -42,7 +42,7 @@ class YourModel(tf.keras.Model):
                    activation="relu", name="block3_conv2"),
             Conv2D(256, 3, 1, padding="same",
                    activation="relu", name="block3_conv3"),
-            AveragePool2D(2, name="block3_pool"),
+            AveragePooling2D(2, name="block3_pool"),
             # Block 4
             Conv2D(512, 3, 1, padding="same",
                    activation="relu", name="block4_conv1"),
@@ -50,7 +50,7 @@ class YourModel(tf.keras.Model):
                    activation="relu", name="block4_conv2"),
             Conv2D(512, 3, 1, padding="same",
                    activation="relu", name="block4_conv3"),
-            AveragePool2D(2, name="block4_pool"),
+            AveragePooling2D(2, name="block4_pool"),
             # Block 5
             Conv2D(512, 3, 1, padding="same",
                    activation="relu", name="block5_conv1"),
@@ -58,7 +58,7 @@ class YourModel(tf.keras.Model):
                    activation="relu", name="block5_conv2"),
             Conv2D(512, 3, 1, padding="same",
                    activation="relu", name="block5_conv3"),
-            AveragePool2D(2, name="block5_pool"),
+            AveragePooling2D(2, name="block5_pool"),
         ]
         for layer in self.vgg16:
             layer.trainable = False
@@ -71,7 +71,7 @@ class YourModel(tf.keras.Model):
         for layer in self.vgg16.layers:
             # pass the x through
             x = layer(x)
-            print("Sotech117 is so so sus")
+            # print("Sotech117 is so so sus")

             # save the output of each layer if it is in the desired list
             if layer.name in self.desired:
@@ -126,7 +126,7 @@ class YourModel(tf.keras.Model):
         return L_style

     def train_step(self):
-        with tf.GradientTape as tape:
+        with tf.GradientTape() as tape:
             loss = self.loss_fn(self.content_image, self.style_image, self.x)
             gradients = tape.gradient(loss, self.x)
             self.optimizer.apply_gradients(zip(gradients, self.x))
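Note on the changes above: tf.keras.layers exposes AveragePooling2D (AvgPool2D is an alias), so the renamed pooling layers match the real Keras API, and tf.GradientTape must be instantiated before it is used as a context manager. Below is a minimal sketch of the corrected train_step pattern, assuming (as in the class above) that self.x is a tf.Variable holding the generated image and that self.loss_fn and self.optimizer are defined; one further assumption is that apply_gradients wants an iterable of (gradient, variable) pairs, so the single image variable is wrapped in a list here rather than zipped directly.

    import tensorflow as tf

    def train_step(self):
        # Record operations on the image variable so gradients can be taken.
        with tf.GradientTape() as tape:
            loss = self.loss_fn(self.content_image, self.style_image, self.x)
        # Differentiate the loss with respect to the generated image only.
        gradients = tape.gradient(loss, self.x)
        # apply_gradients expects (grad, var) pairs, hence the one-element lists.
        self.optimizer.apply_gradients(zip([gradients], [self.x]))
        return loss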