loss cel+dice: fixup

Starbeamrainbowlabs 2023-01-13 18:21:11 +00:00
parent 0f0b691b5d
commit 37d1598b0b
Signed by: sbrl
GPG key ID: 1BE5172E637709C2


@@ -22,6 +22,7 @@ def dice_loss(y_true, y_pred):
class LossCrossEntropyDice(tf.keras.losses.Loss):
	"""Cross-entropy loss and dice loss combined together into one nice neat package.
	Combines the two by taking the mean.
	The ground truth labels should be sparse, NOT one-hot. The predictions should be one-hot, NOT sparse.
	@source https://lars76.github.io/2018/09/27/loss-functions-for-segmentation.html#9
	"""
@@ -30,6 +31,7 @@ class LossCrossEntropyDice(tf.keras.losses.Loss):
	def call(self, y_true, y_pred):
		y_true = tf.cast(y_true, tf.int32) # tf.one_hot requires integer class ids
		y_true = tf.one_hot(y_true, 2) # Input is sparse; one_hot outputs float32
		o = tf.nn.sigmoid_cross_entropy_with_logits(y_true, y_pred) + dice_loss(y_true, y_pred)
		return tf.reduce_mean(o)
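
For context, the hunk headers show that this combined loss builds on a dice_loss helper defined earlier in the same file. A minimal sketch of a soft dice loss in the style of the cited blog post, not necessarily the exact implementation in this repository, assuming y_true and y_pred are same-shaped float tensors and y_pred holds raw logits:

import tensorflow as tf

def dice_loss_sketch(y_true, y_pred):
	# Turn logits into probabilities before measuring overlap
	y_pred = tf.math.sigmoid(y_pred)
	numerator = 2 * tf.reduce_sum(y_true * y_pred, axis=-1)
	denominator = tf.reduce_sum(y_true + y_pred, axis=-1)
	# 1 - dice coefficient; the +1 terms guard against division by zero
	return 1 - (numerator + 1) / (denominator + 1)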
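
A hedged usage sketch of the class itself, assuming it is in scope (the exact import path is repository-specific), that predictions are per-pixel 2-channel logits, and that ground-truth masks are sparse integer class ids as the docstring requires:

import tensorflow as tf

# LossCrossEntropyDice is the class shown in this commit; it is assumed to be
# importable here, since the module path depends on the repository layout.
loss_fn = LossCrossEntropyDice()

# Sparse ground-truth class ids, shape [batch, height, width]
y_true = tf.constant([[[0, 1], [1, 0]]], dtype=tf.int32)
# Per-pixel 2-channel logits from a model, shape [batch, height, width, 2]
y_pred = tf.random.normal([1, 2, 2, 2])

print(loss_fn(y_true, y_pred).numpy()) # scalar: mean of cross-entropy + dice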