From b888a4c57c371530b23e657052f145cad4f9db5f Mon Sep 17 00:00:00 2001
From: Hongyu Liu <43953930+phlrain@users.noreply.github.com>
Date: Mon, 10 Jun 2019 10:42:53 +0800
Subject: [PATCH] fix regularizer lod bug (#17848)

* fix regularizer lod bug; test=develop

* fix exception bug and one_hot expand; test=develop
---
 python/paddle/fluid/clip.py        |  2 ++
 python/paddle/fluid/framework.py   |  2 ++
 python/paddle/fluid/layers/nn.py   |  2 ++
 python/paddle/fluid/regularizer.py | 14 ++++++++++----
 4 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/python/paddle/fluid/clip.py b/python/paddle/fluid/clip.py
index 934ba8c0c09..1c51ef296c6 100644
--- a/python/paddle/fluid/clip.py
+++ b/python/paddle/fluid/clip.py
@@ -21,6 +21,7 @@ import functools
 from . import layers
 from . import framework
 from . import core
+from .dygraph import not_support
 
 __all__ = [
     'ErrorClipByValue',
@@ -335,6 +336,7 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
         return param, new_grad
 
 
+@not_support
 def set_gradient_clip(clip, param_list=None, program=None):
     """
     To specify parameters that require gradient clip.
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 53c9c83b2c4..4c0dd79500b 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -654,6 +654,8 @@ class Variable(object):
     @property
     def lod_level(self):
         # TODO(minqiyang): Support lod_level in dygraph mode
+        if in_dygraph_mode():
+            raise Exception("Dygraph mode does not support LoD")
         return self.desc.lod_level()
 
     @property
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 6676b4a381a..fc010521c43 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -6576,6 +6576,7 @@ def one_hot(input, depth):
         inputs = {'X': input}
         attrs = {'depth': depth}
     else:
+        depth.stop_gradient = True
         inputs = {'X': input, 'depth_tensor': depth}
         attrs = {}
     helper.append_op(
@@ -9383,6 +9384,7 @@ def expand(x, expand_times, name=None):
         new_expand_times = []
         for ele in expand_times:
             if isinstance(ele, Variable):
+                ele.stop_gradient = True
                 new_expand_times.append(ele)
             else:
                 assert (isinstance(ele, int))
diff --git a/python/paddle/fluid/regularizer.py b/python/paddle/fluid/regularizer.py
index 00f8fc815b3..822029a372b 100644
--- a/python/paddle/fluid/regularizer.py
+++ b/python/paddle/fluid/regularizer.py
@@ -162,8 +162,11 @@ class L2DecayRegularizer(WeightDecayRegularizer):
         assert isinstance(param, framework.Parameter)
         assert isinstance(block, framework.Block)
 
-        decay = block.create_var(
-            dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
+        if framework.in_dygraph_mode():
+            decay = block.create_var(dtype=param.dtype, shape=param.shape)
+        else:
+            decay = block.create_var(
+                dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
 
         # Append Op to calculate decay
         block.append_op(
@@ -231,8 +234,11 @@ class L1DecayRegularizer(WeightDecayRegularizer):
         assert isinstance(param, framework.Parameter)
         assert isinstance(block, framework.Block)
 
-        decay = block.create_var(
-            dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
+        if framework.in_dygraph_mode():
+            decay = block.create_var(dtype=param.dtype, shape=param.shape)
+        else:
+            decay = block.create_var(
+                dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)
 
         # Append sign op
         block.append_op(
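
The framework.py hunk makes Variable.lod_level raise in dygraph mode instead of reading an uninitialized desc field. A minimal repro of the new behavior, assuming the fluid 1.x dygraph API (a sketch, not part of the patch):

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    # Dygraph variables carry no LoD information, so the property now raises.
    var = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
    try:
        var.lod_level
    except Exception as e:
        print(e)  # "Dygraph mode does not support LoD"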
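
The nn.py hunks mark tensor-valued shape arguments (one_hot's depth, the Variable entries of expand's expand_times) as stop_gradient, since they are shape metadata rather than differentiable inputs. A sketch of the affected call pattern, assuming the fluid 1.x static-graph API; variable names are illustrative:

import paddle.fluid as fluid
import paddle.fluid.layers as layers

label = layers.data(name='label', shape=[1], dtype='int64')
# A tensor-valued depth is fed in as the 'depth_tensor' input; after this
# patch, one_hot sets depth.stop_gradient = True internally, so no gradient
# flows back through it.
depth = layers.fill_constant(shape=[1], dtype='int32', value=10)
one_hot_label = layers.one_hot(input=label, depth=depth)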
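
Both regularizer hunks apply the same branch. A self-contained sketch of that pattern follows; create_decay_var is a hypothetical helper name, not part of the Paddle source:

from paddle.fluid import framework

def create_decay_var(block, param):
    # param.lod_level would now raise in dygraph mode (see the framework.py
    # hunk above), so the decay variable is created without it there.
    if framework.in_dygraph_mode():
        return block.create_var(dtype=param.dtype, shape=param.shape)
    # Static-graph mode keeps the parameter's LoD level, as before.
    return block.create_var(
        dtype=param.dtype, shape=param.shape, lod_level=param.lod_level)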