PaddlePaddle / PaddleDetection
Commit 16e73e0d
Authored by Wu Yi on Sep 26, 2018; committed via GitHub on Sep 26, 2018
hide operator API (#12543)
* hide operator API
* update
* update api.spec
* fix merge
* fix test
Parent: 7f9dab10
Showing 13 changed files with 65 additions and 82 deletions (+65 -82)
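The change is mechanical across all thirteen files: methods that mutate an operator after construction lose their public status and gain a leading underscore, every in-tree caller is updated, and Operator itself leaves fluid's __all__, so all of its API.spec entries disappear (including read accessors such as attr, has_attr, attr_type, input, output, all_attrs, and to_string, which keep their names but are no longer tracked public API). The spelling map, as applied in the diffs below:

    op.set_attr(name, val)        ->  op._set_attr(name, val)
    op.rename_input(old, new)     ->  op._rename_input(old, new)
    op.rename_output(old, new)    ->  op._rename_output(old, new)
    op.has_kernel(op_type)        ->  op._has_kernel(op_type)
    op.block_attr(name)           ->  op._block_attr(name)
    op.block_attr_id(name)        ->  op._block_attr_id(name)
    op.blocks_attr(name)          ->  op._blocks_attr(name)
    op.blocks_attr_ids(name)      ->  op._blocks_attr_ids(name)
    fluid.get_var(name, program)  ->  fluid.framework._get_var(name, program)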
paddle/contrib/float16/float16_transpiler.py                  +3  -3
paddle/fluid/API.spec                                         +0  -17
paddle/fluid/framework/ir/graph_traits.cc                     +2  -0
paddle/fluid/pybind/protobuf.cc                               +5  -5
python/paddle/fluid/backward.py                               +12 -12
python/paddle/fluid/clip.py                                   +2  -2
python/paddle/fluid/framework.py                              +20 -22
python/paddle/fluid/tests/unittests/dist_transformer.py       +1  -1
python/paddle/fluid/tests/unittests/test_infer_shape.py       +2  -2
python/paddle/fluid/tests/unittests/test_protobuf_descs.py    +10 -10
python/paddle/fluid/transpiler/details/program_utils.py       +2  -2
python/paddle/fluid/transpiler/distribute_transpiler.py       +2  -2
python/paddle/fluid/transpiler/inference_transpiler.py        +4  -4
paddle/contrib/float16/float16_transpiler.py
@@ -102,8 +102,8 @@ class Float16Transpiler:
                 continue
             for input_arg in current_op.input_arg_names:
                 if input_arg in self.input_map:
-                    current_op.rename_input(input_arg,
-                                            self.input_map[input_arg])
+                    current_op._rename_input(input_arg,
+                                             self.input_map[input_arg])

     def _remove_unused_var(self):
         '''
@@ -187,7 +187,7 @@ class Float16Transpiler:
                              shape=var.shape,
                              persistable=var.persistable)
             find_op(var)
-            var.op.rename_output(var_name, tmp_var_name)
+            var.op._rename_output(var_name, tmp_var_name)
             self.block._insert_op(
                 i,
                 type="cast",
paddle/fluid/API.spec
@@ -6,26 +6,9 @@ paddle.fluid.Program.global_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.list_vars ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.parse_from_string ArgSpec(args=['binary_str'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
-paddle.fluid.Operator.__init__ ArgSpec(args=['self', 'block', 'desc', 'type', 'inputs', 'outputs', 'attrs'], varargs=None, keywords=None, defaults=(None, None, None, None))
-paddle.fluid.Operator.all_attrs ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.attr_type ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.block_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.block_attr_id ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.blocks_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.blocks_attr_ids ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.has_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.has_kernel ArgSpec(args=['self', 'op_type'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.input ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.output ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.rename_input ArgSpec(args=['self', 'old_name', 'new_name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.rename_output ArgSpec(args=['self', 'old_name', 'new_name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.set_attr ArgSpec(args=['self', 'name', 'val'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.to_string ArgSpec(args=['self', 'throw_on_error'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.default_startup_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.default_main_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.program_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
-paddle.fluid.get_var ArgSpec(args=['name', 'program'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.name_scope ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.Executor.__init__ ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Executor.close ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
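API.spec pins the tracked public surface: one line per exported callable, in the repr format of inspect.getargspec, which the repo's API check compares against the live modules (hence "update api.spec" in the commit message). Deleting the Operator.* and get_var lines is what actually "hides" the API. A sketch of how one such line can be formed, assuming a Python 2-era inspect.getargspec whose repr matches the file format; spec_line is a hypothetical helper, not part of the repo:

import inspect
import paddle.fluid as fluid

def spec_line(qualified_name, fn):
    # repr of the ArgSpec namedtuple matches the format used in API.spec
    return "{} {}".format(qualified_name, inspect.getargspec(fn))

print(spec_line("paddle.fluid.Program.to_string", fluid.Program.to_string))
# paddle.fluid.Program.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))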
paddle/fluid/framework/ir/graph_traits.cc
@@ -14,6 +14,8 @@
 #include "paddle/fluid/framework/ir/graph_traits.h"

+#include <vector>
+
 namespace paddle {
 namespace framework {
 namespace ir {
paddle/fluid/pybind/protobuf.cc
@@ -285,12 +285,12 @@ void BindOpDesc(pybind11::module *m) {
       .def("set_output", &pd::OpDesc::SetOutput)
       .def("input_arg_names", &pd::OpDesc::InputArgumentNames)
       .def("output_arg_names", &pd::OpDesc::OutputArgumentNames)
-      .def("rename_input", &pd::OpDesc::RenameInput)
-      .def("rename_output", &pd::OpDesc::RenameOutput)
+      .def("_rename_input", &pd::OpDesc::RenameInput)
+      .def("_rename_output", &pd::OpDesc::RenameOutput)
       .def("has_attr", &pd::OpDesc::HasAttr)
       .def("attr_type", &pd::OpDesc::GetAttrType)
       .def("attr_names", &pd::OpDesc::AttrNames)
-      .def("set_attr", &pd::OpDesc::SetAttr)
+      .def("_set_attr", &pd::OpDesc::SetAttr)
       .def("attr", &pd::OpDesc::GetAttr)
       .def("set_block_attr", &pd::OpDesc::SetBlockAttr)
       .def("set_blocks_attr", &pd::OpDesc::SetBlocksAttr)
@@ -300,8 +300,8 @@ void BindOpDesc(pybind11::module *m) {
             std::string ser(seriralized);
             self.SetAttr(name, ser);
           })
-      .def("block_attr_id", &pd::OpDesc::GetBlockAttrId)
-      .def("blocks_attr_ids", &pd::OpDesc::GetBlocksAttrIds)
+      .def("_block_attr_id", &pd::OpDesc::GetBlockAttrId)
+      .def("_blocks_attr_ids", &pd::OpDesc::GetBlocksAttrIds)
       .def("check_attrs", &pd::OpDesc::CheckAttrs)
       .def("infer_shape", &pd::OpDesc::InferShape)
       .def("infer_var_type", &pd::OpDesc::InferVarType)
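These are the pybind bindings behind op.desc, so the Python-visible names on the raw OpDesc change in lockstep with framework.py; only the exposed names change, not the bound C++ functions (pd::OpDesc::SetAttr and friends are untouched). A sketch mirroring the updated test_protobuf_descs.py usage, assuming a build with this commit applied:

import paddle.fluid.core as core

program_desc = core.ProgramDesc()
block = program_desc.block(0)
op = block.append_op()
op.set_type("mul")

# The binding name changed from "set_attr" to "_set_attr";
# readers such as attr/has_attr keep their old spellings.
op._set_attr("x_num_col_dims", 1)
assert op.has_attr("x_num_col_dims")
assert op.attr("x_num_col_dims") == 1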
python/paddle/fluid/backward.py
@@ -38,8 +38,8 @@ def _rename_arg_(op_descs, old_name, new_name, begin_idx=None, end_idx=None):
         op_desc = op_descs[i]
         if isinstance(op_desc, tuple):
             op_desc = op_desc[0]
-        op_desc.rename_input(old_name, new_name)
-        op_desc.rename_output(old_name, new_name)
+        op_desc._rename_input(old_name, new_name)
+        op_desc._rename_output(old_name, new_name)


 def _create_op_desc_(op_type, inputs, outputs, attrs):
@@ -70,7 +70,7 @@ def _create_op_desc_(op_type, inputs, outputs, attrs):
         if isinstance(val, framework.Block):
             op_desc.set_block_attr(name, val.desc)
         else:
-            op_desc.set_attr(name, val)
+            op_desc._set_attr(name, val)
     return op_desc
@@ -346,7 +346,7 @@ def _append_backward_ops_(block,
         grad_sub_block_list = []
         # If the op has its own sub-block, deal with the sub-block first
         if op.has_attr("sub_block"):
-            sub_block = program.block(op.block_attr_id("sub_block"))
+            sub_block = program.block(op._block_attr_id("sub_block"))
             grad_sub_block = program._create_block()
             grad_sub_block._set_forward_block_idx(sub_block.idx)
             cb = _callback_lookup_(op)
@@ -382,7 +382,7 @@ def _append_backward_ops_(block,
     for op_desc in grad_op_descs:
         new_op_desc = target_block.desc.append_op()
         new_op_desc.copy_from(op_desc)
-        new_op_desc.set_attr(op_role_attr_name, backward)
+        new_op_desc._set_attr(op_role_attr_name, backward)
         grad_to_var["__current_op_desc__"] = new_op_desc
         if callbacks is not None:
             assert (isinstance(callbacks, list))
@@ -408,7 +408,7 @@ def _append_backward_vars_(block, start_op_idx, grad_to_var, grad_info_map):
    for op_idx in range(start_op_idx, block.desc.op_size()):
        op_desc = block.desc.op(op_idx)
        if op_desc.has_attr("sub_block"):
-           sub_block = block.program.block(op_desc.block_attr_id("sub_block"))
+           sub_block = block.program.block(op_desc._block_attr_id("sub_block"))
            _append_backward_vars_(sub_block, 0, grad_to_var, grad_info_map)
    new_vars = set()
    # create new gradient variables
@@ -438,12 +438,12 @@ def _rename_grad_(block, start_op_idx, grad_to_var, target_grad_map):
         op_desc = block.desc.op(op_idx)
         for name in op_desc.input_arg_names():
             if name in var_map:
-                op_desc.rename_input(name, var_map[name])
+                op_desc._rename_input(name, var_map[name])

         for name in op_desc.output_arg_names():
             if block.desc.find_var(name.encode("ascii")):
                 new_name = unique_name.generate(name)
-                op_desc.rename_output(name, new_name)
+                op_desc._rename_output(name, new_name)
                 var_map[name] = new_name

     for g, ng in six.iteritems(var_map):
@@ -542,9 +542,9 @@ def append_backward(loss, parameter_list=None, no_grad_set=None,
     if loss.op is None:
         raise ValueError("loss.op is None. Should not happend")
-    loss.op.set_attr(core.op_proto_and_checker_maker.kOpRoleAttrName(),
-                     int(core.op_proto_and_checker_maker.OpRole.Forward) |
-                     int(core.op_proto_and_checker_maker.OpRole.Loss))
+    loss.op._set_attr(core.op_proto_and_checker_maker.kOpRoleAttrName(),
+                      int(core.op_proto_and_checker_maker.OpRole.Forward) |
+                      int(core.op_proto_and_checker_maker.OpRole.Loss))

     if callbacks is not None:
         isinstance(callbacks, list)
@@ -631,7 +631,7 @@ def append_backward(loss, parameter_list=None, no_grad_set=None,
             attr_val = [p.name, g.name]
             if g.op.has_attr(op_role_var_attr_name):
                 attr_val.extend(g.op.attr(op_role_var_attr_name))
-            g.op.set_attr(op_role_var_attr_name, attr_val)
+            g.op._set_attr(op_role_var_attr_name, attr_val)

     return params_and_grads
python/paddle/fluid/clip.py
@@ -75,8 +75,8 @@ class ErrorClipByValue(BaseErrorClipAttr):
         clip_op_desc.set_type("clip")
         clip_op_desc.set_input("X", [grad_name])
         clip_op_desc.set_output("Out", [grad_name])
-        clip_op_desc.set_attr("min", self.min)
-        clip_op_desc.set_attr("max", self.max)
+        clip_op_desc._set_attr("min", self.min)
+        clip_op_desc._set_attr("max", self.max)


 def error_clip_callback(block, context):
python/paddle/fluid/framework.py
@@ -40,11 +40,9 @@ PADDLE_ON_MODEL_CE = os.environ.get('PADDLE_ON_MODEL_CE', None) is not None
 __all__ = [
     'Program',
-    'Operator',
     'default_startup_program',
     'default_main_program',
     'program_guard',
-    'get_var',
     'name_scope',
 ]
@@ -663,11 +661,11 @@ class Operator(object):
                 self._update_desc_attr(attr_name, attr_val)

         self.desc.check_attrs()
-        if self.has_kernel(type):
+        if self._has_kernel(type):
             self.desc.infer_var_type(self.block.desc)
             self.desc.infer_shape(self.block.desc)

-    def has_kernel(self, op_type):
+    def _has_kernel(self, op_type):
         return op_type not in self.OP_WITHOUT_KERNEL_SET

     def to_string(self, throw_on_error):
@@ -708,7 +706,7 @@ class Operator(object):
         """
         return self.desc.input(name)

-    def rename_input(self, old_name, new_name):
+    def _rename_input(self, old_name, new_name):
         """
         Rename the `old_name` to `new_name`.
@@ -719,9 +717,9 @@ class Operator(object):
         Returns:
             None
         """
-        self.desc.rename_input(old_name, new_name)
+        self.desc._rename_input(old_name, new_name)

-    def rename_output(self, old_name, new_name):
+    def _rename_output(self, old_name, new_name):
         """
         Rename the `old_name` to `new_name`.
@@ -732,7 +730,7 @@ class Operator(object):
         Returns:
             None
         """
-        self.desc.rename_output(old_name, new_name)
+        self.desc._rename_output(old_name, new_name)

     @property
     def input_names(self):
@@ -796,7 +794,7 @@ class Operator(object):
         """
         return self.desc.attr_type(name)

-    def set_attr(self, name, val):
+    def _set_attr(self, name, val):
         """
         Set the value of attribute by attribute's name.
@@ -829,7 +827,7 @@ class Operator(object):
                 isinstance(val, core.ProgramDesc):
             self.desc.set_serialized_attr(name, val.serialize_to_string())
         else:
-            self.desc.set_attr(name, val)
+            self.desc._set_attr(name, val)

     @property
     def attr_names(self):
@@ -848,7 +846,7 @@ class Operator(object):
         """
         return self.desc.attr(name)

-    def block_attr_id(self, name):
+    def _block_attr_id(self, name):
         """
         Get the block attribute's id by name.
@@ -858,9 +856,9 @@ class Operator(object):
         Returns:
             int: the block index.
         """
-        return self.desc.block_attr_id(name)
+        return self.desc._block_attr_id(name)

-    def block_attr(self, name):
+    def _block_attr(self, name):
         """
         Get the block attribute by name.
@@ -871,11 +869,11 @@ class Operator(object):
             block: the block attribute.
         """
-        id = self.block_attr_id(name)
+        id = self._block_attr_id(name)
         assert (id >= 0 and id < len(self.block.program.blocks))
         return self.block.program.blocks[id]

-    def blocks_attr(self, name):
+    def _blocks_attr(self, name):
         """
         Get the blocks attribute by name.
@@ -886,13 +884,13 @@ class Operator(object):
             list: list of the blocks attribute.
         """
         attrs = []
-        for i in self.blocks_attr_ids(name):
+        for i in self._blocks_attr_ids(name):
             assert (i >= 0 and i < len(self.block.program.blocks))
             attrs.append(self.block.program.blocks[i])

         return attrs

-    def blocks_attr_ids(self, name):
+    def _blocks_attr_ids(self, name):
         """
         Get the blocks attribute's ids by name.
@@ -903,7 +901,7 @@ class Operator(object):
             list: list of the blocks ids.
         """
-        return self.desc.blocks_attr_ids(name)
+        return self.desc._blocks_attr_ids(name)

     def all_attrs(self):
         """
@@ -917,11 +915,11 @@ class Operator(object):
         for n in attr_names:
             attr_type = self.desc.attr_type(n)
             if attr_type == core.AttrType.BLOCK:
-                attr_map[n] = self.block_attr(n)
+                attr_map[n] = self._block_attr(n)
                 continue

             if attr_type == core.AttrType.BLOCKS:
-                attr_map[n] = self.blocks_attr(n)
+                attr_map[n] = self._blocks_attr(n)
                 continue

             attr_map[n] = self.attr(n)
@@ -1795,7 +1793,7 @@ class Program(object):
             for j in six.moves.range(block.op_size()):
                 op = block.op(j)
                 if op.has_attr('is_test'):
-                    op.set_attr('is_test', True)
+                    op._set_attr('is_test', True)
         res.blocks = [
             Block(res, i) for i in six.moves.range(res.desc.num_blocks())
         ]
@@ -2169,7 +2167,7 @@ def program_guard(main_program, startup_program=None):
         switch_startup_program(startup_program)


-def get_var(name, program=None):
+def _get_var(name, program=None):
     """
     Get a variable by name from the global block of a program.
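framework.py is the heart of the commit: 'Operator' and 'get_var' leave __all__, the mutators become single-underscore methods, and get_var itself becomes _get_var, while read-only accessors (attr, has_attr, attr_type, input, output, to_string) keep their public spellings. A sketch of the resulting call pattern, mirroring the is_test flip in the Program hunk above; this assumes a build with this commit applied, and the dropout program is illustrative, not part of the commit:

import paddle.fluid as fluid

prog = fluid.Program()
with fluid.program_guard(prog):
    x = fluid.layers.data(name="x", shape=[8], dtype="float32")
    y = fluid.layers.dropout(x, dropout_prob=0.5)

for op in prog.global_block().ops:
    if op.has_attr("is_test"):          # readers keep their old names
        op._set_attr("is_test", True)   # writers are now underscore-prefixed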
python/paddle/fluid/tests/unittests/dist_transformer.py
@@ -1488,7 +1488,7 @@ def wrap_decoder(trg_vocab_size,
     if weight_sharing:
         predict = layers.matmul(
             x=dec_output,
-            y=fluid.get_var(word_emb_param_names[0]),
+            y=fluid.framework._get_var(word_emb_param_names[0]),
             transpose_y=True)
     else:
         predict = layers.fc(input=dec_output,
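The transformer test was the one in-tree user of the public fluid.get_var, which this commit removes from fluid's namespace; the replacement is the private module-level helper. A sketch of the migrated lookup, assuming a build with this commit applied (the parameter here is illustrative):

import paddle.fluid as fluid
from paddle.fluid import framework

prog = fluid.Program()
with fluid.program_guard(prog):
    w = fluid.layers.create_parameter(shape=[4, 4], dtype="float32", name="w")

# Before: w_var = fluid.get_var("w", prog)
# After:
w_var = framework._get_var("w", prog)
assert w_var.name == "w"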
python/paddle/fluid/tests/unittests/test_infer_shape.py
@@ -76,8 +76,8 @@ class TestInferShape(unittest.TestCase):
         mul_op_desc.set_input("X", ["x"])
         mul_op_desc.set_input("Y", ["y"])
         mul_op_desc.set_output("Out", ["out"])
-        mul_op_desc.set_attr("x_num_col_dims", 1)
-        mul_op_desc.set_attr("y_num_col_dims", 1)
+        mul_op_desc._set_attr("x_num_col_dims", 1)
+        mul_op_desc._set_attr("y_num_col_dims", 1)

         mul_op_desc.check_attrs()
         mul_op_desc.infer_shape(block)
python/paddle/fluid/tests/unittests/test_protobuf_descs.py
@@ -38,40 +38,40 @@ class TestOpDesc(unittest.TestCase):
         self.assertEqual(['z'], op.output("Out"))
         self.assertEqual(["Out"], op.output_names())

-        op.set_attr("int_attr", 1)
+        op._set_attr("int_attr", 1)
         self.assertEqual(1, op.attr("int_attr"))
         self.assertTrue(op.has_attr("int_attr"))
         self.assertEqual(core.AttrType.INT, op.attr_type("int_attr"))

-        op.set_attr("float_attr", -1.32)
+        op._set_attr("float_attr", -1.32)
         self.assertAlmostEqual(-1.32, op.attr("float_attr"), delta=1e-4)
         self.assertTrue(op.has_attr("float_attr"))

-        op.set_attr("bool_attr", False)
+        op._set_attr("bool_attr", False)
         self.assertFalse(op.attr("bool_attr"))

-        op.set_attr("string_attr", "abc")
+        op._set_attr("string_attr", "abc")
         self.assertEqual("abc", op.attr("string_attr"))
         self.assertTrue(op.has_attr("string_attr"))

-        op.set_attr("ints_attr", [1, 2, 3])
+        op._set_attr("ints_attr", [1, 2, 3])
         self.assertEqual([1, 2, 3], op.attr("ints_attr"))

         expected = [1.2, 2.3, 3.4]
-        op.set_attr("floats_attr", expected)
+        op._set_attr("floats_attr", expected)
         for e, a in zip(expected, op.attr("floats_attr")):
             self.assertAlmostEqual(e, a, delta=1e-4)

-        op.set_attr("strings_attr", ["a", "b", "c"])
+        op._set_attr("strings_attr", ["a", "b", "c"])
         self.assertEqual(["a", "b", "c"], op.attr("strings_attr"))

-        op.set_attr("bools_attr", [True, False, True])
+        op._set_attr("bools_attr", [True, False, True])
         self.assertEqual([True, False, True], op.attr("bools_attr"))

         self.assertEqual(8, len(op.attr_names()))

-        op.set_block_attr("block_attr", program_desc.block(0))
-        self.assertEqual(0, op.block_attr_id("block_attr"))
+        op.set_block_attr("_block_attr", program_desc.block(0))
+        self.assertEqual(0, op._block_attr_id("_block_attr"))

         mul_op = block.append_op()
         mul_op.set_type("mul")
python/paddle/fluid/transpiler/details/program_utils.py
@@ -128,7 +128,7 @@ def op_to_code(op):
         attr_type = op.desc.attr_type(name)
         if attr_type == core.AttrType.BLOCK:
             a = "{name} = block[{value}]".format(
-                name=name, type=attr_type, value=op.block_attr_id(name))
+                name=name, type=attr_type, value=op._block_attr_id(name))
             attrs_str += a
             if i != len(attr_names) - 1:
                 attrs_str += ", "
@@ -136,7 +136,7 @@ def op_to_code(op):
         if attr_type == core.AttrType.BLOCKS:
             a = "{name} = blocks{value}".format(
-                name=name, type=attr_type, value=op.blocks_attr_ids(name))
+                name=name, type=attr_type, value=op._blocks_attr_ids(name))
             attrs_str += a
             if i != len(attr_names) - 1:
                 attrs_str += ", "
python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -668,7 +668,7 @@ in a single call.")
             __clone_lr_op_sub_block__(cloned_op, program, new_sub_block)

         # reset the block of op
-        op.set_attr('sub_block', new_sub_block)
+        op._set_attr('sub_block', new_sub_block)

         # append lr decay ops to the child block if exists
         lr_ops = self._get_lr_ops()
@@ -864,7 +864,7 @@ to transpile() call.")
                 if op.type in [
                         "gaussian_random", "fill_constant", "uniform_random"
                 ]:
-                    op.set_attr("shape", list(new_outputs["Out"].shape))
+                    op._set_attr("shape", list(new_outputs["Out"].shape))
                 s_prog.global_block().append_op(
                     type=op.type,
                     inputs=new_inputs,
python/paddle/fluid/transpiler/inference_transpiler.py
@@ -163,7 +163,7 @@ class InferenceTranspiler(object):
                 next_op = self.block.ops[i + 1]
                 if next_op.type == 'relu':
                     # modify bnorm OP to include relu
-                    current_op.set_attr("fuse_with_relu", True)
+                    current_op._set_attr("fuse_with_relu", True)
                     # remove relu OP
                     self.block._remove_op(i + 1)
             i = i + 1
@@ -377,7 +377,7 @@ class InferenceTranspiler(object):
                     type=old_var.type,
                     dtype=old_var.dtype,
                     shape=old_var.shape)
-                op.rename_input(old_param_name, new_param_name)
+                op._rename_input(old_param_name, new_param_name)
                 self.scope.var(new_param_name)

                 tensor = self.scope.find_var(new_param_name).get_tensor()
@@ -463,8 +463,8 @@ class InferenceTranspiler(object):
             current_op = self.block.ops[i]
             for input_arg in current_op.input_arg_names:
                 if input_arg in self.input_map:
-                    current_op.rename_input(input_arg,
-                                            self.input_map[input_arg])
+                    current_op._rename_input(input_arg,
+                                             self.input_map[input_arg])

     def _remove_unused_var(self):
         '''