BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 3ffcd693 (unverified)
Authored Dec 30, 2022 by 姜永久; committed via GitHub on Dec 30, 2022

Yj/rm legacy part 0 (#49424)

* rm legacy
* clear in_legacy
* fix tracer

Parent: 839e1499
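For orientation, the pattern this series removes can be summarized with a small, self-contained sketch (illustrative stand-in code, not taken from the commit): call sites that used to branch three ways (eager dygraph, legacy dygraph, static graph) now branch two ways, because the legacy dygraph flavor is gone.

# Illustrative sketch only; simplified stand-ins, not Paddle's implementation.
def dispatch_before(tracer_active, eager_enabled):
    # Old: three-way split mirroring in_dygraph_mode / _in_legacy_dygraph / static graph.
    if tracer_active and eager_enabled:
        return "eager dygraph"
    if tracer_active and not eager_enabled:
        return "legacy dygraph"   # the branch this series deletes
    return "static graph"

def dispatch_after(tracer_active, eager_enabled=True):
    # New: any active tracer means eager dygraph; otherwise static graph.
    if tracer_active:
        return "eager dygraph"
    return "static graph"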
Showing 7 changed files with 71 additions and 124 deletions (+71 -124)
python/paddle/common_ops_import.py  (+0 -1)
python/paddle/fluid/dygraph/tracer.py  (+1 -1)
python/paddle/fluid/framework.py  (+30 -69)
python/paddle/fluid/layers/layer_function_generator.py  (+0 -1)
python/paddle/fluid/tests/unittests/op_test.py  (+3 -3)
python/paddle/framework/__init__.py  (+0 -1)
python/paddle/nn/functional/norm.py  (+37 -48)
python/paddle/common_ops_import.py

@@ -24,7 +24,6 @@ from paddle.fluid.framework import (  # noqa: F401
     OpProtoHolder,
     Variable,
     _dygraph_tracer,
-    _in_legacy_dygraph,
     _non_static_mode,
     _varbase_creator,
     convert_np_dtype_to_dtype_,
python/paddle/fluid/dygraph/tracer.py

@@ -306,7 +306,7 @@ class Tracer(core.Tracer):
         stop_gradient=False,
         inplace_map=None,
     ):
-        if not framework._in_legacy_dygraph():
+        if framework.in_dygraph_mode():
             # inputs : {"sum": [tensor], ...}
             # outputs : {"sum": [tensor], ...}
             if type in name_mapping.keys():
python/paddle/fluid/framework.py

@@ -98,11 +98,10 @@ _dy2st_enable_standalone_executor_ = os.environ.get(
 # 2. dygraph_mode():
 #    This flags inidicates we are now running in dygraph mode which called eager mode before.
 # 3. _in_legacy_dygraph():
-#    This flags inidicates we are now running in legacy dygraph mode
+#    This flags has been deprecated
 #
 # They have a relation ship as below:
-# Both dygraph_mode and _in_legacy_dygraph are _non_static_mode, but if you are running in
-# dygraph mode means you are not in _in_legacy_dygraph.
+# Since _in_legacy_graph is deprecated, so dygraph_mode is _non_static_mode
 #
 # Why we have to make different of _in_legacy_dygraph and dygraph_mode?
 # In some performance issue, we find that python if statement cause server performance problem
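Per the rewritten comment above, only two mode helpers remain relevant. A hedged migration sketch (mirroring the call-site updates later in this commit; assumes a Paddle build from around this change, where in_dygraph_mode and _non_static_mode are importable from paddle.fluid.framework):

from paddle.fluid.framework import _non_static_mode, in_dygraph_mode

def pick_execution_path():
    # Old style, removed by this series:
    #     if not _in_legacy_dygraph():
    #         ...eager path...
    # New style: eager dygraph is the only dygraph flavor left.
    if in_dygraph_mode():
        return "eager dygraph"   # imperative execution with the eager tracer
    if _non_static_mode():
        return "non-static"      # tracer active without eager mode; effectively gone now
    return "static graph"        # graph construction / executor path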
@@ -237,10 +236,6 @@ def in_dygraph_mode():
     return (_dygraph_tracer_ is not None) and _in_eager_mode_
 
 
-def _in_legacy_dygraph():
-    return (not _in_eager_mode_) and (_dygraph_tracer_ is not None)
-
-
 def _non_static_mode():
     return _dygraph_tracer_ is not None
@@ -1334,8 +1329,6 @@ class VariableMetaClass(type):
         if in_dygraph_mode():
             return issubclass(t, core.eager.Tensor)
         else:
-            if _in_legacy_dygraph():
-                return issubclass(t, core.VarBase)
             return issubclass(t, Variable)

@@ -1346,8 +1339,6 @@ class ParameterMetaClass(VariableMetaClass):
         if in_dygraph_mode():
             return issubclass(t, EagerParamBase)
         else:
-            if _in_legacy_dygraph():
-                return issubclass(t, ParamBase)
             return issubclass(t, Parameter)
@@ -3892,19 +3883,6 @@ class Block:
                     regularizer=regularizer,
                     error_clip=error_clip,
                 )
             else:
-                if _in_legacy_dygraph():
-                    var = ParamBase(
-                        d.shape(),
-                        d.dtype(),
-                        type=orig_var_type,
-                        name=new_name,
-                        stop_gradient=stop_gradient,
-                        trainable=trainable,
-                        optimize_attr=optimize_attr,
-                        regularizer=regularizer,
-                        error_clip=error_clip,
-                    )
-                else:
-                    var = Parameter(
-                        self,
+                var = Parameter(
+                    self,

@@ -3945,9 +3923,6 @@ class Block:
         param = None
         if in_dygraph_mode():
             param = EagerParamBase(*args, **kwargs)
         else:
-            if _in_legacy_dygraph():
-                param = ParamBase(*args, **kwargs)
-            else:
-                param = Parameter(global_block, *args, **kwargs)
+            param = Parameter(global_block, *args, **kwargs)
@@ -4261,20 +4236,6 @@ class Block:
                     error_clip=p.error_clip,
                     name=v.name,
                 )
             else:
-                if _in_legacy_dygraph():
-                    new_p = ParamBase(
-                        shape=v.shape,
-                        dtype=v.dtype,
-                        type=v.type,
-                        lod_level=v.lod_level,
-                        stop_gradient=p.stop_gradient,
-                        trainable=p.trainable,
-                        optimize_attr=p.optimize_attr,
-                        regularizer=p.regularizer,
-                        error_clip=p.error_clip,
-                        name=v.name,
-                    )
-                else:
-                    new_p = Parameter(
-                        block=self,
+                new_p = Parameter(
+                    block=self,
python/paddle/fluid/layers/layer_function_generator.py

@@ -272,7 +272,6 @@ def generate_activation_fn(op_type):
             op = getattr(_C_ops, op_type)
             return op(x)
         # TODO(dev): Because some ops' yaml has not been migrated.
-        # Replace it with _in_legacy_dygraph while all yaml work is done.
         if in_dygraph_mode() and hasattr(_legacy_C_ops, op_type):
             op = getattr(_legacy_C_ops, op_type)
             return op(x)
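The TODO kept above documents a temporary fallback: ops whose yaml definitions have not yet been migrated are still dispatched through _legacy_C_ops. A hedged sketch of that fallback pattern (plain illustrative Python, not the generated Paddle function):

# Illustrative fallback dispatch: prefer the new-style binding, fall back to the
# legacy binding only if the new one is not available yet.
def call_activation(op_type, x, new_ops, legacy_ops):
    op = getattr(new_ops, op_type, None)
    if op is not None:
        return op(x)               # yaml-migrated op
    legacy_op = getattr(legacy_ops, op_type, None)
    if legacy_op is not None:
        return legacy_op(x)        # not yet migrated: use the legacy binding
    raise NotImplementedError(op_type)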
python/paddle/fluid/tests/unittests/op_test.py

@@ -38,8 +38,8 @@ from paddle.fluid.framework import (
     _dygraph_tracer,
     _enable_legacy_dygraph,
     _in_eager_without_dygraph_check,
-    _in_legacy_dygraph,
     _test_eager_guard,
+    in_dygraph_mode,
 )
 from paddle.fluid.op import Operator
 from paddle.jit.dy2static.utils import parse_arg_and_kwargs

@@ -716,7 +716,7 @@ class OpTest(unittest.TestCase):
                 if if_return_inputs_grad_dict:
                     v.stop_gradient = False
-                    if not _in_legacy_dygraph():
+                    if hasattr(v, "retain_grads"):
                         v.retain_grads()
                 if has_lod:

@@ -2515,7 +2515,7 @@ class OpTest(unittest.TestCase):
             for no_grad_val in no_grad_set:
                 del inputs[no_grad_val]
-            if not _in_legacy_dygraph():
+            if in_dygraph_mode():
                 core.eager.run_backward(
                     fluid.layers.utils.flatten(outputs), grad_outputs, False
                 )
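The middle hunk above swaps a mode check for capability detection: instead of asking which dygraph flavor is active, the test probes the tensor for the retain_grads hook it needs. A minimal sketch of that guard (illustrative only):

# Illustrative duck-typing guard, equivalent in spirit to the new check above.
def maybe_retain_grads(tensor):
    # Call the hook only when the runtime tensor type exposes it.
    if hasattr(tensor, "retain_grads"):
        tensor.retain_grads()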
python/paddle/framework/__init__.py

@@ -64,7 +64,6 @@ from ..fluid.framework import _dygraph_tracer  # noqa: F401
 from ..fluid.layer_helper import LayerHelper  # noqa: F401
 from ..fluid.framework import in_dygraph_mode  # noqa: F401
-from ..fluid.framework import _in_legacy_dygraph  # noqa: F401
 from ..fluid.framework import _global_flags  # noqa: F401
 from ..fluid.framework import _apply_pass  # noqa: F401
 from ..fluid.framework import switch_main_program
python/paddle/nn/functional/norm.py

@@ -17,8 +17,8 @@ import numbers
 # TODO: define normalization api
 import paddle
 import paddle.fluid as fluid
-from paddle import _C_ops, _legacy_C_ops, in_dynamic_mode
-from paddle.fluid.framework import _in_legacy_dygraph, in_dygraph_mode
+from paddle import _C_ops, in_dynamic_mode
+from paddle.fluid.framework import in_dygraph_mode
 from ...fluid import dygraph_utils
 from ...fluid.data_feeder import check_type, check_variable_and_dtype

@@ -336,18 +336,7 @@ def layer_norm(
         out, _, _ = _C_ops.layer_norm(x, weight, bias, epsilon, begin_norm_axis)
         return out
-    if _in_legacy_dygraph():
-        out, _, _ = _legacy_C_ops.layer_norm(
-            x,
-            weight,
-            bias,
-            'epsilon',
-            epsilon,
-            'begin_norm_axis',
-            begin_norm_axis,
-        )
-        return out
+    else:
         check_variable_and_dtype(
             x, 'input', ['float16', 'float32', 'float64'], 'LayerNorm'
         )
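For context, the public entry point whose dygraph fast path is touched above is paddle.nn.functional.layer_norm. A minimal usage sketch (assumes a working Paddle install; shapes are illustrative):

import paddle
import paddle.nn.functional as F

x = paddle.rand([2, 3, 8])      # batch of 2, 3 tokens, feature size 8
weight = paddle.ones([8])       # per-feature scale over the normalized dim
bias = paddle.zeros([8])        # per-feature shift
# In dynamic (eager) mode this call reaches the _C_ops.layer_norm branch kept above.
out = F.layer_norm(x, normalized_shape=8, weight=weight, bias=bias, epsilon=1e-5)
print(out.shape)                # [2, 3, 8]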