mirror of
https://github.com/sbrl/research-rainfallradar
synced 2024-12-22 22:25:01 +00:00
LayerConvNeXtGamma: avoid adding an EagerTensor to config
Very weird how this is a problem when it wasn't before...
This commit is contained in:
parent
32f5200d3b
commit
6423bf6702
2 changed files with 14 additions and 15 deletions
|
@ -3,18 +3,17 @@ import tensorflow as tf
|
|||
# Code from https://github.com/leanderme/ConvNeXt-Tensorflow/blob/main/ConvNeXt.ipynb
|
||||
|
||||
class LayerConvNeXtGamma(tf.keras.layers.Layer):
	"""Scales its input element-wise by a constant per-channel vector.

	This is the "gamma" (layer-scale) step of a ConvNeXt block: a fixed
	vector of shape ``(dim,)`` whose entries are all ``const_val``, which is
	broadcast-multiplied against the layer input.

	Code adapted from
	https://github.com/leanderme/ConvNeXt-Tensorflow/blob/main/ConvNeXt.ipynb
	"""

	def __init__(self, const_val=1e-6, dim=None, name=None, **kwargs):
		"""Initialise the gamma layer.

		Args:
			const_val (float): Value every element of the scale vector takes.
			dim (int): Length of the scale vector; should match the size of
				the input's last axis so broadcasting works.
			name (str): Optional layer name, forwarded to the Keras base class.
			**kwargs: Accepted for Keras-compatibility but currently ignored
				(see the NOTE in get_config about config round-trips).
		"""
		super().__init__(name=name)

		self.dim = dim
		# Constant (non-trainable) scale vector of shape (dim,).
		self.const = const_val * tf.ones((self.dim))

	def call(self, inputs, **kwargs):
		# Element-wise product; self.const (shape (dim,)) broadcasts over
		# the leading axes of inputs.
		return tf.multiply(inputs, self.const)

	def get_config(self):
		config = super().get_config()

		# Store a NumPy array rather than the EagerTensor itself — putting a
		# tensor in the config breaks Keras model serialisation (this is the
		# fix this commit introduces via .numpy()).
		# NOTE(review): get_config() saves the key "const", but __init__
		# takes "const_val"; when Keras rebuilds the layer via from_config(),
		# the saved array lands in **kwargs and is silently dropped, so a
		# reloaded layer falls back to const_val=1e-6. Harmless while
		# const_val is never changed from its default — confirm before
		# relying on non-default values surviving a save/load round-trip.
		config.update({ "const": self.const.numpy(), "dim": self.dim })

		return config
|
||||
|
|
|
@ -16,7 +16,7 @@ def make_callbacks(dirpath, model_predict):
|
|||
model_to_checkpoint=model_predict,
|
||||
filepath=os.path.join(
|
||||
dirpath_checkpoints,
|
||||
"checkpoint_weights_e{epoch:d}_loss{loss:.3f}.hdf5"
|
||||
"checkpoint_e{epoch:d}_loss{loss:.3f}.hdf5"
|
||||
),
|
||||
monitor="loss"
|
||||
),
|
||||
|
|
Loading…
Reference in a new issue