Unverified · Commit 016f5ecb · Authored by: cyber-pioneer · Committed by: GitHub

[Prim]fix attrs loss in creating op (#50780)

* fix attrs loss in creating op

* add comment

* add case

* add case

* remove unused case setting
Parent 2be69d05
@@ -2886,6 +2886,8 @@ class Operator:
            self._type = type
            self.attrs = attrs if attrs else {}
        else:
            self.legacy_attrs = attrs if attrs else {}
            self.block = block
            self.desc = desc
            # note: not add self.attrs here:
@@ -3083,6 +3085,11 @@ class Operator:
                )
            self.desc.check_attrs()
            # Record all attrs needed when creating the op.
            for item in self.desc.attr_names():
                self.legacy_attrs[item] = self.desc.attr(item)
            if self._has_kernel(type):
                self.desc.infer_var_type(self.block.desc)
                self.desc.infer_shape(self.block.desc)
@@ -3090,6 +3097,10 @@ class Operator:
    def _has_kernel(self, op_type):
        return op_type not in self.OP_WITHOUT_KERNEL_SET

    def _get_runtime_attrs(self):
        """Return all attrs recorded when creating the op. This API is only for the to_prim process."""
        return self.legacy_attrs

    def to_string(self, throw_on_error):
        """
        Get debug string.
...
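The change above takes a snapshot of every attribute at op-construction time (`legacy_attrs`), tops it up with whatever lands in the op desc, and exposes it through `_get_runtime_attrs()`. A minimal standalone sketch of why that snapshot matters (a toy class, not Paddle's real `Operator`; everything except the `legacy_attrs` / `_get_runtime_attrs` names is hypothetical):

class Op:
    def __init__(self, op_type, attrs):
        self.type = op_type
        self.legacy_attrs = dict(attrs) if attrs else {}  # snapshot at creation
        # Desc-level attr list; may later be pruned, e.g. for blocked ops.
        self.attr_names = list(self.legacy_attrs)

    def _get_runtime_attrs(self):
        """Return the attrs recorded when the op was created."""
        return self.legacy_attrs


op = Op("dropout", {"dropout_prob": 0.5, "is_test": False})
op.attr_names.remove("is_test")  # simulate an attr missing from the desc

# Copying via attr_names silently drops the pruned attr ...
lossy = {name: op.legacy_attrs[name] for name in op.attr_names}
# ... while the creation-time snapshot keeps it.
complete = dict(op._get_runtime_attrs())
assert "is_test" not in lossy
assert "is_test" in complete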
@@ -11,3 +11,4 @@ endforeach()
add_subdirectory(prim)
add_subdirectory(model)
add_subdirectory(composite_ops)
add_subdirectory(process)
@@ -3,12 +3,7 @@ file(
  RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
  "test_*.py")
file(
  GLOB TEST_OPS_GRAD
  RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
  "test_*_grad.py")
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")
string(REPLACE ".py" "" TEST_OPS_GRAD "${TEST_OPS_GRAD}")
foreach(TEST_OP ${TEST_OPS})
  py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS})
...
file(
  GLOB TEST_OPS
  RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
  "test_*.py")
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")
foreach(TEST_OP ${TEST_OPS})
  py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS})
endforeach()
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest

import numpy as np

import paddle
from paddle.fluid import core

paddle.framework.random._manual_program_seed(2023)
def fn(x):
    dropout1 = paddle.nn.Dropout(p=0.5)
    dropout2 = paddle.nn.Dropout(p=0.6)
    y = dropout1(x)
    z = dropout2(y)
    return z
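Note that the two dropout layers deliberately use different probabilities (0.5 and 0.6): if the copied ops fell back to a default `p` because an attr was lost, the eager and static results compared below would diverge, which is exactly the regression this test guards against.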
class TestCompositeCopyOp(unittest.TestCase):
    """Test that the op copying process keeps all attrs even when the origin op has been blacklisted while constructing the program."""
    def cal_composite(self, inputs):
        paddle.enable_static()
        core._set_prim_forward_enabled(True)
        startup_program = paddle.static.Program()
        main_program = paddle.static.Program()
        with paddle.static.program_guard(main_program, startup_program):
            x = paddle.static.data(
                'x', shape=inputs.shape, dtype=str(inputs.dtype)
            )
            y = fn(x)
            blocks = main_program.blocks

            fwd_ops = [op.type for op in blocks[0].ops]
            # Ensure that dropout is in the original block.
            self.assertTrue('dropout' in fwd_ops)

            paddle.incubate.autograd.to_prim(blocks)

            fwd_ops_new = [op.type for op in blocks[0].ops]
            # Ensure that dropout is not split into smaller ops (it is blacklisted).
            self.assertTrue('dropout' in fwd_ops_new)

        exe = paddle.static.Executor()
        exe.run(startup_program)
        res = exe.run(main_program, feed={'x': inputs}, fetch_list=[y])
        paddle.disable_static()
        core._set_prim_forward_enabled(False)
        return res
    def test_forward(self):
        core._set_prim_forward_blacklist("dropout")
        np_data = np.random.random([16, 64, 128, 128]).astype("float32")
        tensor_data = paddle.to_tensor(np_data)

        expect = fn(tensor_data).numpy()
        actual = self.cal_composite(np_data)[0]
        assert expect.dtype == actual.dtype
        np.testing.assert_allclose(
            expect,
            actual,
            rtol=0,
            atol=0,
        )
if __name__ == '__main__':
    unittest.main()
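A note on the zero tolerances: `assert_allclose(..., rtol=0, atol=0)` demands bit-exact agreement between the eager and static runs. That presumably holds because `_manual_program_seed(2023)` fixes the randomness so both executions draw identical dropout masks; the comparison can then fail only if the copied dropout ops lost attrs such as `p` during `to_prim`.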
@@ -656,8 +656,12 @@ def _lower_composite(block, blacklist=[]):
                outputs[op.output_names[i]] = op.output(op.output_names[i])

                attrs = {}
                for name in sorted(op.attr_names):
                    attrs[name] = op.attr(name)
                # When copying an op, all attrs defined in the api should be
                # kept, but op.attr_names is not complete here. Thus, all attrs
                # are taken from the init attrs recorded on the origin op.
                runtime_attrs = op._get_runtime_attrs()
                for name in runtime_attrs.keys():
                    attrs[name] = runtime_attrs[name]

                from paddle.fluid.dygraph.base import param_guard

                new_op_desc = block.desc.append_op()
...
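Taken together, the copy path for a blacklisted op now rebuilds the attr dict from the creation-time snapshot instead of the desc. A condensed sketch of that path (`copy_op` is hypothetical glue standing in for the inputs/outputs bookkeeping elided from the hunk; `append_op` appears in the hunk, while `copy_from` and `_set_attr` are assumed here to be the usual OpDesc methods):

def copy_op(block, op):
    # Rebuild attrs from the snapshot recorded in Operator.__init__, not from
    # the (possibly incomplete) desc-level op.attr_names.
    attrs = dict(op._get_runtime_attrs())
    new_op_desc = block.desc.append_op()
    new_op_desc.copy_from(op.desc)  # type, inputs, outputs
    for name, value in attrs.items():
        new_op_desc._set_attr(name, value)  # re-apply the full attr set
    return new_op_desc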