debug party time

Starbeamrainbowlabs 2022-10-31 17:50:29 +00:00
parent 458faa96d2
commit b986b069e2
Signed by: sbrl
GPG key ID: 1BE5172E637709C2
2 changed files with 26 additions and 6 deletions


@@ -1,3 +1,5 @@
+import math
+
 import tensorflow as tf
 
 class LossContrastive(tf.keras.losses.Loss):
@@ -10,8 +12,8 @@ class LossContrastive(tf.keras.losses.Loss):
 		rainfall, water = tf.unstack(y_pred, axis=-2)
 		# print("LOSS:call y_true", y_true.shape)
 		# print("LOSS:call y_pred", y_pred.shape)
-		# print("BEFORE_RESHAPE rainfall", rainfall)
-		# print("BEFORE_RESHAPE water", water)
+		print("START rainfall", rainfall)
+		print("START water", water)
 		
 		# # Ensure the shapes are defined
 		# rainfall = tf.reshape(rainfall, [self.batch_size, rainfall.shape[1]])
@@ -21,20 +23,29 @@ class LossContrastive(tf.keras.losses.Loss):
 		rainfall = rainfall / tf.math.l2_normalize(rainfall, axis=1)
 		water = water / tf.math.l2_normalize(water, axis=1)
 		
+		print("AFTER_L2 rainfall", rainfall)
+		print("AFTER_L2 water", water)
+		
 		# logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.clip_by_value(tf.math.exp(self.weight_temperature), 0, 100)
 		logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(self.weight_temperature)
 		
-		# print("LOGITS", logits)
+		print("LOGITS", logits)
 		# labels = tf.eye(self.batch_size, dtype=tf.int32) # we *would* do this if we were using mean squared error...
 		labels = tf.range(self.batch_size, dtype=tf.int32) # each row is a different category we think
 		loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
 		loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)
 		
 		# loss_rainfall = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=0)
 		# loss_water = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=1)
 		
+		print("LABELS", labels)
+		print("LOSS_RAINFALL", loss_rainfall)
+		print("LOSS_WATER", loss_water)
+		
 		loss = (loss_rainfall + loss_water) / 2
 		
+		print("LOSS", loss)
+		
 		# cosine_similarity results in tensor of range -1 - 1, but tf.sparse.eye has range 0 - 1
 		# print("LABELS", labels)
 		# print("LOSS_rainfall", loss_rainfall)
@@ -42,3 +53,12 @@ class LossContrastive(tf.keras.losses.Loss):
 		# print("LOSS", loss)
 		return loss
 
+
+if __name__ == "__main__":
+	weight_temperature = tf.Variable(name="loss_temperature", shape=1, initial_value=tf.constant([
+		math.log(1 / 0.07)
+	]))
+	loss = LossContrastive(weight_temperature=weight_temperature, batch_size=64)
+	
+	tensor_input = tf.random.uniform([64, 2, 512])
+	print(loss(tensor_input))
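
For context, the class being instrumented here follows the CLIP-style symmetric contrastive pattern: normalise the two sets of embeddings, build their pairwise similarity matrix scaled by a learned temperature, and take sparse categorical cross-entropy along both axes with the diagonal as the target. Below is a minimal standalone sketch of that pattern (not part of this commit; the variable names, batch size, and embedding width are illustrative) showing roughly what the debug prints above are inspecting:

import math
import tensorflow as tf

batch_size = 8
# Illustrative stand-ins for the two encoder outputs, L2-normalised per sample
rainfall = tf.math.l2_normalize(tf.random.uniform([batch_size, 512]), axis=1)
water = tf.math.l2_normalize(tf.random.uniform([batch_size, 512]), axis=1)

# Pairwise similarity matrix [batch_size, batch_size], scaled by a learned temperature
weight_temperature = tf.Variable([math.log(1 / 0.07)])
logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(weight_temperature)

# The i-th rainfall sample should match the i-th water sample,
# so the target "class" for row/column i is simply i
labels = tf.range(batch_size, dtype=tf.int32)
loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)
loss = (loss_rainfall + loss_water) / 2
print("LOSS", loss)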