提交 afccd54d 编写于 作者: Waleed Abdulla

Make gradient norm clipping a config option.

上级 555126ee
......@@ -87,7 +87,7 @@ class Config(object):
USE_MINI_MASK = True
MINI_MASK_SHAPE = (56, 56) # (height, width) of the mini-mask
# Input image resing
# Input image resizing
# Images are resized such that the smallest side is >= IMAGE_MIN_DIM and
# the longest side is <= IMAGE_MAX_DIM. In case both conditions can't
# be satisfied together the IMAGE_MAX_DIM is enforced.
......@@ -148,6 +148,9 @@ class Config(object):
# train the RPN.
USE_RPN_ROIS = True
# Gradient norm clipping
GRADIENT_CLIP_NORM = 5.0
def __init__(self):
"""Set values of computed attributes."""
# Effective batch size
......
......@@ -2063,7 +2063,7 @@ class MaskRCNN():
"""
# Optimizer object
optimizer = keras.optimizers.SGD(lr=learning_rate, momentum=momentum,
clipnorm=5.0)
clipnorm=self.config.GRADIENT_CLIP_NORM)
# Add Losses
# First, clear previously set losses to avoid duplication
self.keras_model._losses = []
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请注册