diff --git a/aimodel/src/lib/ai/components/LayerCheeseMultipleOut.py b/aimodel/src/lib/ai/components/LayerCheeseMultipleOut.py
index 3f9481a..5e6d13f 100644
--- a/aimodel/src/lib/ai/components/LayerCheeseMultipleOut.py
+++ b/aimodel/src/lib/ai/components/LayerCheeseMultipleOut.py
@@ -44,4 +44,4 @@ class LayerCheeseMultipleOut(tf.keras.layers.Layer):
 		
 		
 		# By this point, the above has already dropped through the encoder, so should be in the form [ batch_size, dim ]
-		return tf.stack(inputs, axis=-2)
\ No newline at end of file
+		return tf.stack(inputs, axis=-2)
diff --git a/aimodel/src/lib/ai/components/LossContrastive.py b/aimodel/src/lib/ai/components/LossContrastive.py
index 356147f..d54cf62 100644
--- a/aimodel/src/lib/ai/components/LossContrastive.py
+++ b/aimodel/src/lib/ai/components/LossContrastive.py
@@ -1,3 +1,5 @@
+import math
+
 import tensorflow as tf
 
 class LossContrastive(tf.keras.losses.Loss):
@@ -10,8 +12,8 @@
 		rainfall, water = tf.unstack(y_pred, axis=-2)
 		# print("LOSS:call y_true", y_true.shape)
 		# print("LOSS:call y_pred", y_pred.shape)
-		# print("BEFORE_RESHAPE rainfall", rainfall)
-		# print("BEFORE_RESHAPE water", water)
+		print("START rainfall", rainfall)
+		print("START water", water)
 		
 		# # Ensure the shapes are defined
 		# rainfall = tf.reshape(rainfall, [self.batch_size, rainfall.shape[1]])
@@ -21,24 +23,42 @@
 		rainfall = rainfall / tf.math.l2_normalize(rainfall, axis=1)
 		water = water / tf.math.l2_normalize(water, axis=1)
 		
+		print("AFTER_L2 rainfall", rainfall)
+		print("AFTER_L2 water", water)
+		
 		# logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.clip_by_value(tf.math.exp(self.weight_temperature), 0, 100)
 		logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(self.weight_temperature)
 		
-		# print("LOGITS", logits)
+		print("LOGITS", logits)
 		
 		# labels = tf.eye(self.batch_size, dtype=tf.int32) # we *would* do this if we were using mean squared error...
 		labels = tf.range(self.batch_size, dtype=tf.int32) # each row is a different category we think
 		loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
 		loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)
+		
+		
 		# loss_rainfall = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=0)
 		# loss_water = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=1)
-		
+		print("LABELS", labels)
+		print("LOSS_RAINFALL", loss_rainfall)
+		print("LOSS_WATER", loss_water)
+		
 		loss = (loss_rainfall + loss_water) / 2
+		print("LOSS", loss)
 		
 		# cosine_similarity results in tensor of range -1 - 1, but tf.sparse.eye has range 0 - 1
 		# print("LABELS", labels)
 		# print("LOSS_rainfall", loss_rainfall)
 		# print("LOSS_water", loss_water)
 		# print("LOSS", loss)
 		return loss
-	
\ No newline at end of file
+	
+
+if __name__ == "__main__":
+	weight_temperature = tf.Variable(name="loss_temperature", shape=1, initial_value=tf.constant([
+		math.log(1 / 0.07)
+	]))
+	loss = LossContrastive(weight_temperature=weight_temperature, batch_size=64)
+	
+	tensor_input = tf.random.uniform([64, 2, 512])
+	print(loss(tensor_input))
\ No newline at end of file