import math

import tensorflow as tf


class LossContrastive(tf.keras.losses.Loss):
	"""Implements a contrastive loss function.

	@warning: This does not function as it should.

	Args:
		weight_temperature (tf.Variable): The temperature weight (e.g. from LayerCheeseMultipleOut).
		batch_size (int): The batch size.
	"""
	def __init__(self, weight_temperature, batch_size):
		super(LossContrastive, self).__init__()
		self.batch_size = batch_size
		self.weight_temperature = weight_temperature

	def call(self, y_true, y_pred):
		# y_pred stacks the rainfall and water embeddings along the second-to-last axis
		rainfall, water = tf.unstack(y_pred, axis=-2)

		# print("LOSS:call y_true", y_true.shape)
		# print("LOSS:call y_pred", y_pred.shape)
		print("START rainfall", rainfall)
		print("START water", water)

		# # Ensure the shapes are defined
		# rainfall = tf.reshape(rainfall, [self.batch_size, rainfall.shape[1]])
		# water = tf.reshape(water, [self.batch_size, water.shape[1]])

		# normalise features (currently disabled)
		# rainfall = tf.math.l2_normalize(rainfall, axis=1)
		# water = tf.math.l2_normalize(water, axis=1)

		print("AFTER_L2 rainfall", rainfall)
		print("AFTER_L2 water", water)

		# logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.clip_by_value(tf.math.exp(self.weight_temperature), 0, 100)
		# Pairwise dot-product similarity matrix between the two embedding sets, scaled by the exponentiated learnable temperature
		logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(self.weight_temperature)

		print("LOGITS", logits)

		# labels = tf.eye(self.batch_size, dtype=tf.int32) # we *would* do this if we were using mean squared error...
		labels = tf.range(self.batch_size, dtype=tf.int32) # each row is a different category we think
		# Symmetric cross-entropy over both axes of the similarity matrix
		loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
		loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)

		# loss_rainfall = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=0)
		# loss_water = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=1)
		print("LABELS", labels)
		print("LOSS_RAINFALL", loss_rainfall)
		print("LOSS_WATER", loss_water)

		loss = (loss_rainfall + loss_water) / 2

		print("LOSS", loss)

		# Collapse the per-sample losses to a single scalar
		loss = tf.math.reduce_mean(loss)

		print("LOSS FINAL", loss)
		# cosine_similarity results in a tensor in the range -1 to 1, but tf.sparse.eye has the range 0 to 1
		# print("LABELS", labels)
		# print("LOSS_rainfall", loss_rainfall)
		# print("LOSS_water", loss_water)
		# print("LOSS", loss)
		return loss

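
# A minimal reference sketch of a working symmetric contrastive loss, assuming the
# intent above is a CLIP-style objective. It is illustrative only: the function name
# is hypothetical and nothing else in this file calls it.
def contrastive_loss_sketch(rainfall, water, weight_temperature):
	"""Computes a symmetric contrastive loss over a batch of embedding pairs.

	Args:
		rainfall (tf.Tensor): Rainfall embeddings of shape [batch_size, dim].
		water (tf.Tensor): Water embeddings of shape [batch_size, dim].
		weight_temperature (tf.Variable): The log-temperature scalar.
	"""
	# L2-normalise so the matrix product below yields cosine similarities
	rainfall = tf.math.l2_normalize(rainfall, axis=1)
	water = tf.math.l2_normalize(water, axis=1)
	logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(weight_temperature)
	# The matching pairs sit on the diagonal, so the target class for row/column i is i
	labels = tf.range(tf.shape(rainfall)[0], dtype=tf.int32)
	loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
	loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)
	return tf.math.reduce_mean((loss_rainfall + loss_water) / 2)
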
if __name__ == "__main__":
	# Quick smoke test: a batch of 64 random embedding pairs, 512 dimensions each
	# Initialise the learnable temperature to log(1/0.07)
	weight_temperature = tf.Variable(name="loss_temperature", shape=1, initial_value=tf.constant([
		math.log(1 / 0.07)
	]))
	loss = LossContrastive(weight_temperature=weight_temperature, batch_size=64)

	tensor_input = tf.random.uniform([64, 2, 512])
	print(loss(tf.constant(1), tensor_input))
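
# Usage sketch (hypothetical): wiring this loss into a Keras model. The model is
# assumed to output a [batch_size, 2, dim] tensor of stacked rainfall and water
# embeddings; "model" and "dataset" are placeholder names, not defined in this file.
#
# model.compile(
# 	optimizer="adam",
# 	loss=LossContrastive(weight_temperature=weight_temperature, batch_size=64),
# )
# model.fit(dataset, epochs=10)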