Mirror of https://github.com/sbrl/research-rainfallradar (synced 2024-11-22 09:13:01 +00:00)
dlr: add cross-entropy + dice loss fn option

commit be77f035c8, parent b2a5acaf4e
3 changed files with 59 additions and 6 deletions
@@ -30,10 +30,11 @@ show_help() {
 	echo -e "    DIR_RAINFALLWATER    The path to the directory containing the .tfrecord files of rainfall radar / water depth data." >&2;
 	echo -e "    PATH_HEIGHTMAP    The path to the heightmap jsonl file to read in." >&2;
 	echo -e "    PATH_COLOURMAP    The path to the colourmap for predictive purposes." >&2;
-	echo -e "    PATH_CHECKPOINT    The path to a checkpoint to load. If specified, a model will be loaded instead of being trained." >&2;
 	echo -e "    STEPS_PER_EPOCH    The number of steps to consider an epoch. Defaults to None, which means use the entire dataset." >&2;
 	echo -e "    NO_REMOVE_ISOLATED_PIXELS    Set to any value to prevent the engine from removing isolated pixels - that is, water pixels with no other surrounding water pixels, either side-to-side or diagonally." >&2;
 	echo -e "    EPOCHS    The number of epochs to train for." >&2;
+	echo -e "    LOSS    The loss function to use. Default: cross-entropy (possible values: cross-entropy, cross-entropy-dice)." >&2;
+	echo -e "    PATH_CHECKPOINT    The path to a checkpoint to load. If specified, a model will be loaded instead of being trained." >&2;
 	echo -e "    PREDICT_COUNT    The number of items from the (SCRAMBLED) dataset to make a prediction for." >&2;
 	echo -e "    POSTFIX    Postfix to append to the output dir (auto calculated)." >&2;
 	echo -e "    ARGS    Optional. Any additional arguments to pass to the python program." >&2;
@@ -60,7 +61,7 @@ DIR_OUTPUT="output/$(date -u --rfc-3339=date)_${CODE}";
 echo -e ">>> Additional args: ${ARGS}";
 
 export PATH=$HOME/software/bin:$PATH;
-export IMAGE_SIZE BATCH_SIZE DIR_RAINFALLWATER PATH_HEIGHTMAP PATH_COLOURMAP STEPS_PER_EPOCH DIR_OUTPUT PATH_CHECKPOINT EPOCHS PREDICT_COUNT NO_REMOVE_ISOLATED_PIXELS;
+export IMAGE_SIZE BATCH_SIZE DIR_RAINFALLWATER PATH_HEIGHTMAP PATH_COLOURMAP STEPS_PER_EPOCH DIR_OUTPUT PATH_CHECKPOINT EPOCHS PREDICT_COUNT NO_REMOVE_ISOLATED_PIXELS LOSS;
 
 echo ">>> Installing requirements";
 conda run -n py38 pip install -q -r requirements.txt;
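For context, a hypothetical invocation of this job script (the script filename and data paths below are assumptions for illustration, not part of the commit; LOSS is the only new variable):

    DIR_RAINFALLWATER=/data/rainfallwater \
    PATH_HEIGHTMAP=/data/heightmap.jsonl \
    PATH_COLOURMAP=/data/colourmap.json \
    LOSS=cross-entropy-dice \
        ./train-deeplabv3plus.job.sh

Leaving LOSS unset keeps the previous behaviour (plain cross-entropy).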
@@ -17,6 +17,7 @@ import matplotlib.pyplot as plt
 import tensorflow as tf
 
 from lib.dataset.dataset_mono import dataset_mono
+from lib.ai.components.LossCrossEntropyDice import LossCrossEntropyDice
 
 IMAGE_SIZE = int(os.environ["IMAGE_SIZE"]) if "IMAGE_SIZE" in os.environ else 128 # was 512; 128 is the highest power of 2 that fits the data
 BATCH_SIZE = int(os.environ["BATCH_SIZE"]) if "BATCH_SIZE" in os.environ else 64
@@ -27,12 +28,12 @@ PATH_COLOURMAP = os.environ["PATH_COLOURMAP"]
 STEPS_PER_EPOCH = int(os.environ["STEPS_PER_EPOCH"]) if "STEPS_PER_EPOCH" in os.environ else None
 REMOVE_ISOLATED_PIXELS = False if "NO_REMOVE_ISOLATED_PIXELS" in os.environ else True
 EPOCHS = int(os.environ["EPOCHS"]) if "EPOCHS" in os.environ else 25
-PREDICT_COUNT = int(os.environ["PREDICT_COUNT"]) if "PREDICT_COUNT" in os.environ else 4
-
+LOSS = os.environ["LOSS"] if "LOSS" in os.environ else "cross-entropy"
 
 DIR_OUTPUT=os.environ["DIR_OUTPUT"] if "DIR_OUTPUT" in os.environ else f"output/{datetime.utcnow().date().isoformat()}_deeplabv3plus_rainfall_TEST"
 
 PATH_CHECKPOINT = os.environ["PATH_CHECKPOINT"] if "PATH_CHECKPOINT" in os.environ else None
+PREDICT_COUNT = int(os.environ["PREDICT_COUNT"]) if "PREDICT_COUNT" in os.environ else 4
 
 if not os.path.exists(DIR_OUTPUT):
 	os.makedirs(os.path.join(DIR_OUTPUT, "checkpoints"))
@@ -45,7 +46,10 @@ logger.info(f"> PATH_COLOURMAP {PATH_COLOURMAP}")
 logger.info(f"> STEPS_PER_EPOCH {STEPS_PER_EPOCH}")
 logger.info(f"> REMOVE_ISOLATED_PIXELS {REMOVE_ISOLATED_PIXELS} [NO_REMOVE_ISOLATED_PIXELS]")
 logger.info(f"> EPOCHS {EPOCHS}")
+
+logger.info(f"> LOSS {LOSS}")
+
 logger.info(f"> DIR_OUTPUT {DIR_OUTPUT}")
 
 logger.info(f"> PATH_CHECKPOINT {PATH_CHECKPOINT}")
 logger.info(f"> PREDICT_COUNT {PREDICT_COUNT}")
@@ -152,10 +156,17 @@ else:
 	# ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██████
 
 if PATH_CHECKPOINT is None:
-	loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
+	loss_fn = None
+	if LOSS == "cross-entropy-dice":
+		loss_fn = LossCrossEntropyDice()
+	elif LOSS == "cross-entropy":
+		loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
+	else:
+		raise Exception(f"Error: Unknown loss function '{LOSS}' (possible values: cross-entropy, cross-entropy-dice).")
+
 	model.compile(
 		optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
-		loss=loss,
+		loss=loss_fn,
 		metrics=["accuracy"],
 	)
 	logger.info(">>> Beginning training")
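One subtlety worth noting: as written, the two options appear to assume different label encodings. SparseCategoricalCrossentropy expects integer class labels against per-class logits, while LossCrossEntropyDice (new file below) applies a sigmoid, i.e. binary per-pixel labels against a single logit channel. A minimal, self-contained sketch of the selection logic above (the toy model shape is an assumption for illustration, not the repository's actual DeepLabV3+ setup):

    import os
    import tensorflow as tf
    from lib.ai.components.LossCrossEntropyDice import LossCrossEntropyDice

    LOSS = os.environ.get("LOSS", "cross-entropy")

    if LOSS == "cross-entropy-dice":
        loss_fn = LossCrossEntropyDice()  # binary: sigmoid cross-entropy + dice
    elif LOSS == "cross-entropy":
        loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    else:
        raise Exception(f"Error: Unknown loss function '{LOSS}'.")

    # Toy stand-in model (assumption): 2-class logits over a 128x128 grid.
    model = tf.keras.Sequential([
        tf.keras.Input(shape=(128, 128, 1)),
        tf.keras.layers.Conv2D(2, kernel_size=1),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
        loss=loss_fn,
        metrics=["accuracy"],
    )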
aimodel/src/lib/ai/components/LossCrossEntropyDice.py (new file, 41 lines)

@@ -0,0 +1,41 @@
+import math
+
+import tensorflow as tf
+
+
+def dice_loss(y_true, y_pred):
+	"""Compute the Dice loss.
+	@source https://lars76.github.io/2018/09/27/loss-functions-for-segmentation.html#9
+	Args:
+		y_true (tf.Tensor): The ground truth label.
+		y_pred (tf.Tensor): The output predicted by the model.
+	
+	Returns:
+		tf.Tensor: The computed Dice loss.
+	"""
+	y_pred = tf.math.sigmoid(y_pred)
+	numerator = 2 * tf.reduce_sum(y_true * y_pred)
+	denominator = tf.reduce_sum(y_true + y_pred)
+	
+	return 1 - numerator / denominator
+
+class LossCrossEntropyDice(tf.keras.losses.Loss):
+	"""Cross-entropy loss and dice loss combined together into one nice neat package.
+	Combines the two with mean.
+	@source https://lars76.github.io/2018/09/27/loss-functions-for-segmentation.html#9
+	"""
+	
+	def __init__(self, **kwargs):
+		super(LossCrossEntropyDice, self).__init__(**kwargs)
+	
+	def call(self, y_true, y_pred):
+		y_true = tf.cast(y_true, tf.float32)
+		o = tf.nn.sigmoid_cross_entropy_with_logits(labels=y_true, logits=y_pred) + dice_loss(y_true, y_pred)
+		return tf.reduce_mean(o)
+	
+	def get_config(self):
+		config = super(LossCrossEntropyDice, self).get_config()
+		config.update({
+			
+		})
+		return config
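The dice term implements 1 - 2*sum(y * sigmoid(p)) / sum(y + sigmoid(p)) over the whole batch, and call() adds that scalar to the element-wise sigmoid cross-entropy before taking the mean. A quick toy check of the combined loss (values made up for illustration; assumes binary per-pixel labels and raw logits, matching the sigmoid calls above):

    import tensorflow as tf
    from lib.ai.components.LossCrossEntropyDice import LossCrossEntropyDice

    # One 2x2 "image": binary ground truth vs raw logit predictions.
    y_true = tf.constant([[[1.0, 0.0], [1.0, 1.0]]])
    y_pred = tf.constant([[[4.0, -4.0], [3.0, -2.0]]])  # confident, except bottom-right

    loss = LossCrossEntropyDice()(y_true, y_pred)
    print(float(loss))  # mean sigmoid cross-entropy plus the (scalar) dice term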