Unverified commit 5b3f91df, authored by Li Min, committed by GitHub

Replace with dygraph op calling method. (#44331)

* Replace with dygraph op calling method.
Parent 32b3469a
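All of the hunks below apply the same change: where an API used the legacy dygraph call style (attributes passed as 'attr_name', value string pairs), it now dispatches to the new final-state dygraph op (`_C_ops.final_state_*`) when `in_dygraph_mode()` is true, keeping the legacy call behind `_in_legacy_dygraph()`. A minimal sketch of that dispatch shape, using a hypothetical operator `my_op` (not part of this commit) and the internal framework helpers this codebase relies on:

# Sketch only: `final_state_my_op` / `my_op` are hypothetical names used for
# illustration; the real ops touched by this commit are elementwise_pow, eye,
# scatter_nd_add and expand. The imports are internal helpers of this Paddle
# version, not stable public API.
from paddle import _C_ops
from paddle.fluid.framework import in_dygraph_mode, _in_legacy_dygraph


def my_api(x, axis):
    if in_dygraph_mode():
        # New (eager) dygraph path: plain positional arguments.
        return _C_ops.final_state_my_op(x, axis)
    if _in_legacy_dygraph():
        # Legacy dygraph path: attributes passed as ('attr_name', value) pairs.
        return _C_ops.my_op(x, 'axis', axis)
    # Static-graph branch (LayerHelper + append_op) is omitted in this sketch.
    raise NotImplementedError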
@@ -376,8 +376,11 @@ def monkey_patch_math_varbase():
         if framework._in_eager_mode_ else
         ('__rtruediv__',
          _binary_creator_('rtruediv__', 'elementwise_div', True, None)),
-        ('__pow__', _binary_creator_('__pow__', 'elementwise_pow', False,
-                                     None)),
+        ('__pow__',
+         _binary_creator_('__pow__', 'final_state_elementwise_pow', False, None,
+                          True)) if framework._in_eager_mode_ else
+        ('__pow__',
+         _binary_creator_('__pow__', 'elementwise_pow', False, None)),
         ('__rpow__', _binary_creator_('__rpow__', 'elementwise_pow', True,
                                       None)),
         ('__floordiv__',
...
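For context, the patched `__pow__` is what backs the `**` operator on tensors, so in eager mode `a ** 2` now routes through `final_state_elementwise_pow`. A quick check with the public API, whose usage is unchanged by this commit:

import paddle

a = paddle.to_tensor([1.0, 2.0, 3.0])
b = a ** 2               # dispatches through the monkey-patched __pow__
print(b.numpy())         # [1. 4. 9.]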
@@ -1776,7 +1776,7 @@ class TestEagerTensorGradNameValue(unittest.TestCase):
             b = a**2
             self.assertEqual(a._grad_value(), None)
             b.backward()
-            self.assertEqual('eager_in_tmp' in a._grad_name(), True)
+            # Note, for new dygraph, there are no generated grad name, so we skip the name check.
             self.assertNotEqual(a._grad_value(), None)
...
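The behaviour this test still asserts can be sketched with the public API: after `backward()` the gradient value is populated, even though the new dygraph no longer produces a generated grad variable name such as 'eager_in_tmp'. A rough equivalent, assuming eager mode (the default in this era of Paddle):

import paddle

a = paddle.to_tensor([3.0], stop_gradient=False)
b = a ** 2
assert a.grad is None          # no gradient before backward()
b.backward()
assert a.grad is not None      # gradient value exists afterwards
print(a.grad.numpy())          # [6.] since d(a**2)/da = 2a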
@@ -663,8 +663,12 @@ def eye(num_rows, num_columns=None, dtype=None, name=None):
         num_columns = num_rows
     if _non_static_mode():
-        out = _C_ops.eye('dtype', dtype, 'num_rows', num_rows, 'num_columns',
-                         num_columns)
+        if in_dygraph_mode():
+            out = _C_ops.final_state_eye(num_rows, num_columns, dtype,
+                                         _current_expected_place())
+        elif _in_legacy_dygraph():
+            out = _C_ops.eye('dtype', dtype, 'num_rows', num_rows,
+                             'num_columns', num_columns)
     else:
         helper = LayerHelper("eye", **locals())
...
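The public `paddle.eye` signature shown in the hunk header is unchanged; only the eager-mode dispatch differs. A small usage sketch:

import paddle

out = paddle.eye(3, 4, dtype='float32')
print(out.numpy())
# [[1. 0. 0. 0.]
#  [0. 1. 0. 0.]
#  [0. 0. 1. 0.]]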
@@ -2705,8 +2705,7 @@ def scatter_nd_add(x, index, updates, name=None):
             # [3, 5, 9, 10]
     """
     if in_dygraph_mode():
-        op = getattr(_C_ops, 'scatter_nd_add')
-        return op(x, index, updates)
+        return _C_ops.final_state_scatter_nd_add(x, index, updates)
     else:
         if _in_legacy_dygraph():
             op = getattr(_C_ops, 'scatter_nd_add')
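A usage example for the public `paddle.scatter_nd_add` that matches the output shape noted in the docstring comment above ([3, 5, 9, 10]); the shapes follow the documented requirement that updates.shape equals index.shape[:-1] + x.shape[index.shape[-1]:]:

import paddle

x = paddle.rand(shape=[3, 5, 9, 10], dtype='float32')
updates = paddle.rand(shape=[3, 9, 10], dtype='float32')
index = paddle.to_tensor([[1, 1], [0, 1], [1, 3]], dtype='int64')

out = paddle.scatter_nd_add(x, index, updates)
print(out.shape)   # [3, 5, 9, 10]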
@@ -3002,7 +3001,7 @@ def broadcast_to(x, shape, name=None):
             # [[1, 2, 3], [1, 2, 3]]
     """
     if paddle.in_dynamic_mode():
-        return _C_ops.expand_v2(x, 'shape', shape)
+        return _C_ops.final_state_expand(x, shape)
     if isinstance(shape, Variable):
         assert len(shape.shape) == 1, ('shape must be an 1-D Tensor.')
...
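Likewise, `paddle.broadcast_to` keeps its public behaviour; the result from the docstring comment above still holds:

import paddle

data = paddle.to_tensor([1, 2, 3], dtype='int32')
out = paddle.broadcast_to(data, shape=[2, 3])
print(out.numpy())
# [[1 2 3]
#  [1 2 3]]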