From 547f1f704f3fd005616c9391ba3b7b33e79c12b4 Mon Sep 17 00:00:00 2001
From: Waleed Abdulla
Date: Fri, 10 Nov 2017 17:39:41 -0800
Subject: [PATCH] Use mean L2 regularization loss rather than sum.

---
 model.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/model.py b/model.py
index 1dfbef8..d081466 100644
--- a/model.py
+++ b/model.py
@@ -1930,10 +1930,10 @@ class MaskRCNN():
             self.keras_model.add_loss(tf.reduce_mean(layer.output, keep_dims=True))
 
         # Add L2 Regularization
-        reg_losses = [keras.regularizers.l2(self.config.WEIGHT_DECAY)(w)
-                      for w in self.keras_model.trainable_weights]
+        reg_losses = [keras.regularizers.l2(self.config.WEIGHT_DECAY)(w) / tf.cast(tf.size(w), tf.float32)
+                      for w in self.keras_model.trainable_weights]
         self.keras_model.add_loss(tf.add_n(reg_losses))
-
+
         # Compile
         self.keras_model.compile(optimizer=optimizer,
                                  loss=[None]*len(self.keras_model.outputs))
--
GitLab
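
Note on the change: dividing each weight tensor's L2 penalty by tf.size(w) turns the
regularization term added to the total loss from a sum over all elements into a
per-element mean, so a layer's penalty no longer grows simply because it holds more
parameters. Below is a minimal, standalone sketch (not part of the patch) of the
before/after computation for a single tensor. It assumes the TF 1.x session-based
runtime and standalone Keras 2 API that model.py used at the time; the WEIGHT_DECAY
value and the tensor shape are illustrative, not taken from the patch.

    # Sketch: summed vs. mean L2 penalty for one weight tensor.
    # Assumes TF 1.x with standalone Keras 2, matching the era of this patch.
    import numpy as np
    import tensorflow as tf
    import keras

    WEIGHT_DECAY = 0.0001                      # illustrative value only
    w = tf.constant(np.random.randn(1024, 256).astype(np.float32))

    # Old behaviour: keras.regularizers.l2 returns WEIGHT_DECAY * sum(w ** 2),
    # so the penalty scales with the number of parameters in the layer.
    sum_loss = keras.regularizers.l2(WEIGHT_DECAY)(w)

    # New behaviour: dividing by tf.size(w) (the element count) turns the sum
    # into a per-element mean, putting every layer's penalty on the same scale.
    mean_loss = keras.regularizers.l2(WEIGHT_DECAY)(w) / tf.cast(tf.size(w), tf.float32)

    with tf.Session() as sess:
        print(sess.run([sum_loss, mean_loss]))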