LayerConvNeXtGamma: avoid adding an EagerTensor to config

It is unclear why this has become a problem now when it wasn't before.
This commit is contained in:
Starbeamrainbowlabs 2022-10-12 17:12:07 +01:00
parent 32f5200d3b
commit 6423bf6702
Signed by: sbrl
GPG key ID: 1BE5172E637709C2
2 changed files with 14 additions and 15 deletions

View file

@@ -3,18 +3,17 @@ import tensorflow as tf
# Code from https://github.com/leanderme/ConvNeXt-Tensorflow/blob/main/ConvNeXt.ipynb
class LayerConvNeXtGamma(tf.keras.layers.Layer):
    """Constant per-channel scaling layer for ConvNeXt blocks.

    Multiplies its input element-wise by a constant vector of shape
    ``(dim,)`` filled with ``const_val`` (default ``1e-6``, the ConvNeXt
    layer-scale initial value). The scale is fixed, not trainable.

    Args:
        const_val: Scalar value used to fill the scale vector.
        dim: Length of the scale vector (typically the channel count).
        name: Optional Keras layer name, forwarded to the base class.
    """

    def __init__(self, const_val=1e-6, dim=None, name=None, **kwargs):
        super(LayerConvNeXtGamma, self).__init__(name=name)
        self.dim = dim
        # Materialise the scale vector once at construction; shape (dim,).
        self.const = const_val * tf.ones((self.dim))

    def call(self, inputs, **kwargs):
        # Element-wise scale; broadcasts over the leading axes of `inputs`.
        return tf.multiply(inputs, self.const)

    def get_config(self):
        config = super(LayerConvNeXtGamma, self).get_config()
        # Store a plain numpy array, NOT an EagerTensor: putting an
        # EagerTensor into the config breaks model (de)serialisation.
        # NOTE(review): the key "const" does not match the __init__
        # parameter name "const_val", so from_config() would not
        # round-trip this layer as-is — confirm intended behaviour.
        config.update({ "const": self.const.numpy(), "dim": self.dim })
        return config

View file

@@ -16,7 +16,7 @@ def make_callbacks(dirpath, model_predict):
model_to_checkpoint=model_predict,
filepath=os.path.join(
dirpath_checkpoints,
"checkpoint_weights_e{epoch:d}_loss{loss:.3f}.hdf5"
"checkpoint_e{epoch:d}_loss{loss:.3f}.hdf5"
),
monitor="loss"
),