LayerConvNeXtGamma: avoid adding an EagerTensor to config

Very weird how this is a problem now when it wasn't before.
This commit is contained in:
Starbeamrainbowlabs 2022-10-12 17:12:07 +01:00
parent 32f5200d3b
commit 6423bf6702
Signed by: sbrl
GPG key ID: 1BE5172E637709C2
2 changed files with 14 additions and 15 deletions

View file

@ -15,6 +15,5 @@ class LayerConvNeXtGamma(tf.keras.layers.Layer):
def get_config(self):
config = super(LayerConvNeXtGamma, self).get_config()
config.update({ "const": self.const, "dim": self.dim })
config.update({ "const": self.const.numpy(), "dim": self.dim })
return config

View file

@ -16,7 +16,7 @@ def make_callbacks(dirpath, model_predict):
model_to_checkpoint=model_predict,
filepath=os.path.join(
dirpath_checkpoints,
"checkpoint_weights_e{epoch:d}_loss{loss:.3f}.hdf5"
"checkpoint_e{epoch:d}_loss{loss:.3f}.hdf5"
),
monitor="loss"
),