diff --git a/paddle/fluid/pybind/eager_method.cc b/paddle/fluid/pybind/eager_method.cc
index e7978b8e23ac49e0d542abf96634f28961b6bdfd..847c2bd8b9d300b5b499d739aa97da4a569f6ed3 100644
--- a/paddle/fluid/pybind/eager_method.cc
+++ b/paddle/fluid/pybind/eager_method.cc
@@ -722,6 +722,33 @@ static PyObject* tensor_method_get_underline_selected_rows(TensorObject* self,
   EAGER_CATCH_AND_THROW_RETURN_NULL
 }
 
+static PyObject* tensor_method__get_tensor_from_selected_rows(
+    TensorObject* self, PyObject* args, PyObject* kwargs) {
+  EAGER_TRY
+  PADDLE_ENFORCE(self->tensor.is_selected_rows(),
+                 paddle::platform::errors::Fatal(
+                     "this method is only effective for SelectedRows."));
+
+  auto* selected_rows =
+      static_cast<phi::SelectedRows*>(self->tensor.impl().get());
+
+  PADDLE_ENFORCE(
+      selected_rows->initialized(),
+      paddle::platform::errors::Fatal("SelectedRows must be initialized."));
+
+  auto* dense_tensor = static_cast<paddle::framework::Tensor*>(
+      selected_rows->mutable_value());
+  VLOG(1) << "dense_tensor: " << dense_tensor->IsInitialized();
+
+  auto t = paddle::experimental::Tensor(
+      egr::Controller::Instance().GenerateUniqueName());
+  t.set_impl(std::make_shared<phi::DenseTensor>(*dense_tensor));
+
+  return ToPyObject(t);
+
+  EAGER_CATCH_AND_THROW_RETURN_NULL
+}
+
 static PyObject* tensor__getitem_index_not_tensor(TensorObject* self,
                                                   PyObject* args,
                                                   PyObject* kwargs) {
@@ -1852,6 +1879,10 @@ PyMethodDef variable_methods[] = {
      (PyCFunction)(void (*)(void))tensor_method_get_underline_selected_rows,
      METH_VARARGS | METH_KEYWORDS,
      NULL},
+    {"_get_tensor_from_selected_rows",
+     (PyCFunction)(void (*)(void))tensor_method__get_tensor_from_selected_rows,
+     METH_VARARGS | METH_KEYWORDS,
+     NULL},
     {"_getitem_index_not_tensor",
      (PyCFunction)(void (*)(void))tensor__getitem_index_not_tensor,
      METH_VARARGS | METH_KEYWORDS,
diff --git a/python/paddle/fluid/clip.py b/python/paddle/fluid/clip.py
index dd69630b53f426b7e1350ac0c101b011472a80ed..c912d7d8d406f409df7f8504a793d6931543d28c 100644
--- a/python/paddle/fluid/clip.py
+++ b/python/paddle/fluid/clip.py
@@ -71,13 +71,9 @@ def _squared_l2_norm(x):
         return sum_square
 
     if in_dygraph_mode():
-        if x.is_selected_rows():
-            new_x = paddle.to_tensor(x.numpy())
-            return _C_ops.final_state_squared_l2_norm(new_x)
         return _C_ops.final_state_squared_l2_norm(x)
-    else:
-        if _in_legacy_dygraph():
-            return _C_ops.squared_l2_norm(x)
+    elif _in_legacy_dygraph():
+        return _C_ops.squared_l2_norm(x)
 
     op_type = 'squared_l2_norm'
     check_variable_and_dtype(x, 'x', ['float32', 'float64'], op_type)
@@ -495,7 +491,12 @@ class ClipGradByGlobalNorm(ClipGradBase):
             if getattr(p, 'need_clip', True) is False:
                 continue
             merge_grad = g
-            if g.type == core.VarDesc.VarType.SELECTED_ROWS:
+
+            if in_dygraph_mode() and g.is_selected_rows():
+                merge_grad = layers.merge_selected_rows(g)
+                merge_grad = merge_grad._get_tensor_from_selected_rows()
+
+            elif g.type == core.VarDesc.VarType.SELECTED_ROWS:
                 merge_grad = layers.merge_selected_rows(g)
                 merge_grad = layers.get_tensor_from_selected_rows(merge_grad)
 
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 53ab7ac05ca7a45a9e5f96fdf05c3ed58ff280d7..c59f361b92ef5e471ae0f6298bc1a732c2cae3ba 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -13150,6 +13150,8 @@ def merge_selected_rows(x, name=None):
             type=fluid.core.VarDesc.VarType.SELECTED_ROWS)
         y = fluid.layers.merge_selected_rows(var)
     """
+    if _non_static_mode():
+        return _C_ops.merge_selected_rows(x)
     helper = LayerHelper("merge_selected_rows", **locals())
     out = helper.create_variable_for_type_inference(dtype=x.dtype)
     helper.append_op(