dlr: add JIT_COMPILE

Starbeamrainbowlabs 2023-05-04 18:22:18 +01:00
parent dddc08c663
commit 8593999eb6
Signed by: sbrl
GPG key ID: 1BE5172E637709C2
2 changed files with 6 additions and 3 deletions

View file

@@ -42,7 +42,8 @@ show_help() {
 echo -e "    PATH_CHECKPOINT	The path to a checkpoint to load. If specified, a model will be loaded instead of being trained." >&2;
 echo -e "    LEARNING_RATE	The learning rate to use. Default: 0.001." >&2;
 echo -e "    UPSAMPLE	How much to upsample by at the beginning of the model. A value of disables upscaling. Default: 2." >&2;
-echo -e "    STEPS_PER_EXECUTION	How much to upsample by at the beginning of the model. A value of disables upscaling. Default: 2." >&2;
+echo -e "    STEPS_PER_EXECUTION	How many steps to perform before surfacing from the GPU to e.g. do callbacks. Default: 16." >&2;
+echo -e "    JIT_COMPILE	Set to any value to compile the model with XLA." >&2;
 echo -e "    PREDICT_COUNT	The number of items from the (SCRAMBLED) dataset to make a prediction for." >&2;
 echo -e "    POSTFIX	Postfix to append to the output dir (auto calculated)." >&2;
 echo -e "    ARGS	Optional. Any additional arguments to pass to the python program." >&2;
@@ -73,7 +74,7 @@ echo -e ">>> DIR_OUTPUT: ${DIR_OUTPUT}";
 echo -e ">>> Additional args: ${ARGS}";
 export PATH=$HOME/software/bin:$PATH;
-export IMAGE_SIZE BATCH_SIZE DIR_RAINFALLWATER PATH_HEIGHTMAP PATH_COLOURMAP STEPS_PER_EPOCH DIR_OUTPUT PATH_CHECKPOINT EPOCHS PREDICT_COUNT NO_REMOVE_ISOLATED_PIXELS LOSS LEARNING_RATE DICE_LOG_COSH WATER_THRESHOLD UPSAMPLE STEPS_PER_EXECUTION;
+export IMAGE_SIZE BATCH_SIZE DIR_RAINFALLWATER PATH_HEIGHTMAP PATH_COLOURMAP STEPS_PER_EPOCH DIR_OUTPUT PATH_CHECKPOINT EPOCHS PREDICT_COUNT NO_REMOVE_ISOLATED_PIXELS LOSS LEARNING_RATE DICE_LOG_COSH WATER_THRESHOLD UPSAMPLE STEPS_PER_EXECUTION JIT_COMPILE;
 echo ">>> Installing requirements";
 conda run -n py38 pip install -q -r requirements.txt;

View file

@@ -53,6 +53,7 @@ WATER_THRESHOLD = float(os.environ["WATER_THRESHOLD"]) if "WATER_THRESHOLD" in o
 UPSAMPLE = int(os.environ["UPSAMPLE"]) if "UPSAMPLE" in os.environ else 2
 STEPS_PER_EXECUTION = int(os.environ["STEPS_PER_EXECUTION"]) if "STEPS_PER_EXECUTION" in os.environ else 16
+JIT_COMPILE = True if "JIT_COMPILE" in os.environ else False
 DIR_OUTPUT=os.environ["DIR_OUTPUT"] if "DIR_OUTPUT" in os.environ else f"output/{datetime.utcnow().date().isoformat()}_deeplabv3plus_rainfall_TEST"
 PATH_CHECKPOINT = os.environ["PATH_CHECKPOINT"] if "PATH_CHECKPOINT" in os.environ else None
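
Note that, matching the help text added above ("Set to any value to compile the model with XLA"), JIT_COMPILE is a presence-based flag: the parse line only checks whether the variable exists, so any value, including "0" or an empty string, switches it on, and the only way to turn it off is to leave the variable unset. A minimal standalone illustration of that behaviour (not part of the diff; the lowercase jit_compile name is just a local variable for demonstration):

import os

# Presence-based flag: the value is never inspected, only whether the variable exists.
os.environ["JIT_COMPILE"] = "0"
jit_compile = True if "JIT_COMPILE" in os.environ else False
print(jit_compile)  # True, even though the value is "0"

# To disable, the variable has to be absent entirely.
del os.environ["JIT_COMPILE"]
jit_compile = True if "JIT_COMPILE" in os.environ else False
print(jit_compile)  # False

The ternary is equivalent to the bare membership test "JIT_COMPILE" in os.environ; spelling out True/False simply mirrors the style of the neighbouring lines.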
@@ -228,7 +229,8 @@ if PATH_CHECKPOINT is None:
 specificity # How many true negatives were accurately predicted?
 # TODO: Add IoU, F1, Precision, Recall, here.
 ],
-steps_per_execution=STEPS_PER_EXECUTION
+steps_per_execution=STEPS_PER_EXECUTION,
+jit_compile=JIT_COMPILE
 )
 logger.info(">>> Beginning training")
 history = model.fit(dataset_train,
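
Taken together, the two files mean that exporting JIT_COMPILE when submitting the job flows through to Keras' XLA support via the jit_compile argument of Model.compile(). Below is a minimal, self-contained sketch of that pattern, assuming TensorFlow/Keras 2.8 or later (where Model.compile() accepts both steps_per_execution and jit_compile); the tiny placeholder model, optimiser, loss, and metric are illustrative only and are not the project's DeepLabV3+ rainfall model:

import os
import tensorflow as tf

# Presence-based flag and steps-per-execution override, mirroring the changes above.
JIT_COMPILE = True if "JIT_COMPILE" in os.environ else False
STEPS_PER_EXECUTION = int(os.environ.get("STEPS_PER_EXECUTION", 16))

# Placeholder model standing in for the real segmentation model.
model = tf.keras.Sequential([
	tf.keras.Input(shape=(8,)),
	tf.keras.layers.Dense(1, activation="sigmoid"),
])

model.compile(
	optimizer="adam",
	loss="binary_crossentropy",
	metrics=["accuracy"],
	steps_per_execution=STEPS_PER_EXECUTION,  # batches to run per tf.function call before returning to Python
	jit_compile=JIT_COMPILE,                  # compile the train/predict step with XLA when the env var is set
)

In general, jit_compile=True asks Keras to compile the training step with XLA, which can speed up large models on GPU, but not every operation is XLA-compatible, so failures or fallbacks are possible, which is presumably why it is exposed here as an opt-in flag.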