mirror of
https://github.com/sbrl/research-rainfallradar
synced 2024-12-22 22:25:01 +00:00
debug party time
This commit is contained in:
parent
458faa96d2
commit
b986b069e2
2 changed files with 26 additions and 6 deletions
|
@ -44,4 +44,4 @@ class LayerCheeseMultipleOut(tf.keras.layers.Layer):
|
|||
|
||||
# By this point, the above has already dropped through the encoder, so should be in the form [ batch_size, dim ]
|
||||
|
||||
return tf.stack(inputs, axis=-2)
|
||||
return tf.stack(inputs, axis=-2)
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import math
|
||||
|
||||
import tensorflow as tf
|
||||
|
||||
class LossContrastive(tf.keras.losses.Loss):
|
||||
|
@ -10,8 +12,8 @@ class LossContrastive(tf.keras.losses.Loss):
|
|||
rainfall, water = tf.unstack(y_pred, axis=-2)
|
||||
# print("LOSS:call y_true", y_true.shape)
|
||||
# print("LOSS:call y_pred", y_pred.shape)
|
||||
# print("BEFORE_RESHAPE rainfall", rainfall)
|
||||
# print("BEFORE_RESHAPE water", water)
|
||||
print("START rainfall", rainfall)
|
||||
print("START water", water)
|
||||
|
||||
# # Ensure the shapes are defined
|
||||
# rainfall = tf.reshape(rainfall, [self.batch_size, rainfall.shape[1]])
|
||||
|
@ -21,24 +23,42 @@ class LossContrastive(tf.keras.losses.Loss):
|
|||
rainfall = rainfall / tf.math.l2_normalize(rainfall, axis=1)
|
||||
water = water / tf.math.l2_normalize(water, axis=1)
|
||||
|
||||
print("AFTER_L2 rainfall", rainfall)
|
||||
print("AFTER_L2 water", water)
|
||||
|
||||
# logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.clip_by_value(tf.math.exp(self.weight_temperature), 0, 100)
|
||||
logits = tf.linalg.matmul(rainfall, tf.transpose(water)) * tf.math.exp(self.weight_temperature)
|
||||
|
||||
# print("LOGITS", logits)
|
||||
print("LOGITS", logits)
|
||||
|
||||
# labels = tf.eye(self.batch_size, dtype=tf.int32) # we *would* do this if we were using mean squared error...
|
||||
labels = tf.range(self.batch_size, dtype=tf.int32) # each row is a different category we think
|
||||
loss_rainfall = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=0)
|
||||
loss_water = tf.keras.metrics.sparse_categorical_crossentropy(labels, logits, from_logits=True, axis=1)
|
||||
|
||||
|
||||
# loss_rainfall = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=0)
|
||||
# loss_water = tf.keras.metrics.binary_crossentropy(labels, logits, from_logits=True, axis=1)
|
||||
|
||||
print("LABELS", labels)
|
||||
print("LOSS_RAINFALL", loss_rainfall)
|
||||
print("LOSS_WATER", loss_water)
|
||||
|
||||
loss = (loss_rainfall + loss_water) / 2
|
||||
|
||||
print("LOSS", loss)
|
||||
# cosine_similarity results in tensor of range -1 - 1, but tf.sparse.eye has range 0 - 1
|
||||
# print("LABELS", labels)
|
||||
# print("LOSS_rainfall", loss_rainfall)
|
||||
# print("LOSS_water", loss_water)
|
||||
# print("LOSS", loss)
|
||||
return loss
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
	# Quick manual smoke test: instantiate the contrastive loss with the
	# CLIP-style initial temperature, log(1 / 0.07), and run it once on a
	# random tensor shaped [ batch_size=64, rainfall/water pair=2, dim=512 ].
	initial_temperature = tf.constant([math.log(1 / 0.07)])
	weight_temperature = tf.Variable(
		name="loss_temperature",
		shape=1,
		initial_value=initial_temperature,
	)
	loss_fn = LossContrastive(weight_temperature=weight_temperature, batch_size=64)
	
	tensor_input = tf.random.uniform([64, 2, 512])
	# NOTE(review): called with a single positional argument — assumes
	# LossContrastive's call/__call__ accepts the stacked prediction tensor
	# alone; confirm against the class definition.
	print(loss_fn(tensor_input))
|
Loading…
Reference in a new issue