Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
机器未来
Paddle
提交
dea52631
P
Paddle
项目概览
机器未来
/
Paddle
与 Fork 源项目一致
Fork自
PaddlePaddle / Paddle
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
dea52631
编写于
1月 05, 2018
作者:
F
fengjiayi
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
update error clip
上级
4ead8e1b
变更
3
隐藏空白更改
内联
并排
Showing
3 changed files
with
42 additions
and
21 deletions
+42
-21
python/paddle/v2/fluid/backward.py
python/paddle/v2/fluid/backward.py
+9
-3
python/paddle/v2/fluid/clip.py
python/paddle/v2/fluid/clip.py
+20
-18
python/paddle/v2/fluid/framework.py
python/paddle/v2/fluid/framework.py
+13
-0
未找到文件。
python/paddle/v2/fluid/backward.py
浏览文件 @
dea52631
...
@@ -190,8 +190,15 @@ def _append_backward_ops_(target,
...
@@ -190,8 +190,15 @@ def _append_backward_ops_(target,
val(str): corresponding forward variable name
val(str): corresponding forward variable name
callback(callable object): a callable object used to decorate new generated grad ops
callback(callable object): a callable object used to decorate new generated grad ops
"""
"""
if
callback
is
not
None
and
not
hasattr
(
callback
,
'__call__'
):
if
callback
is
None
:
def
empty_callback
(
block
):
pass
callback
=
empty_callback
elif
not
hasattr
(
callback
,
'__call__'
):
raise
ValueError
(
"'callback' must be a callable object."
)
raise
ValueError
(
"'callback' must be a callable object."
)
# grad_op_descs holds created grad_op, and will be appended to target_block
# grad_op_descs holds created grad_op, and will be appended to target_block
grad_op_descs
=
[]
grad_op_descs
=
[]
program
=
block
.
program
program
=
block
.
program
...
@@ -208,8 +215,6 @@ def _append_backward_ops_(target,
...
@@ -208,8 +215,6 @@ def _append_backward_ops_(target,
# Getting op's corresponding grad_op
# Getting op's corresponding grad_op
grad_op_desc
,
op_grad_to_var
=
core
.
get_grad_op_desc
(
grad_op_desc
,
op_grad_to_var
=
core
.
get_grad_op_desc
(
op
.
desc
,
no_grad_dict
[
block
.
idx
],
grad_sub_block_list
)
op
.
desc
,
no_grad_dict
[
block
.
idx
],
grad_sub_block_list
)
if
callback
is
not
None
:
grad_op_desc
=
callback
(
grad_op_desc
)
grad_op_descs
.
extend
(
grad_op_desc
)
grad_op_descs
.
extend
(
grad_op_desc
)
grad_to_var
.
update
(
op_grad_to_var
)
grad_to_var
.
update
(
op_grad_to_var
)
...
@@ -230,6 +235,7 @@ def _append_backward_ops_(target,
...
@@ -230,6 +235,7 @@ def _append_backward_ops_(target,
for
op_desc
in
grad_op_descs
:
for
op_desc
in
grad_op_descs
:
new_op_desc
=
target_block
.
desc
.
append_op
()
new_op_desc
=
target_block
.
desc
.
append_op
()
new_op_desc
.
copy_from
(
op_desc
)
new_op_desc
.
copy_from
(
op_desc
)
callback
(
block
=
target_block
,
context
=
grad_to_var
)
def
_append_backward_vars_
(
block
,
start_op_idx
,
grad_to_var
,
grad_info_map
):
def
_append_backward_vars_
(
block
,
start_op_idx
,
grad_to_var
,
grad_info_map
):
...
...
python/paddle/v2/fluid/clip.py
浏览文件 @
dea52631
...
@@ -6,18 +6,9 @@ __all__ = ['GradientClipByValue', 'append_gradient_clip_ops']
...
@@ -6,18 +6,9 @@ __all__ = ['GradientClipByValue', 'append_gradient_clip_ops']
class BaseErrorClipAttr(object):
    """Abstract base for error-clip attributes attached to forward variables.

    Concrete subclasses (e.g. ErrorClipByValue) implement append_clip_op to
    insert a clipping operator for one gradient variable into a block.
    """

    def append_clip_op(self, block, grad_name):
        """Append a clip op for gradient `grad_name` into `block`.

        Must be overridden by subclasses.
        """
        raise NotImplementedError()
class
ErrorClipByValue
(
BaseErrorClipAttr
):
class
ErrorClipByValue
(
BaseErrorClipAttr
):
def
__init__
(
self
,
max
,
min
=
None
):
def
__init__
(
self
,
max
,
min
=
None
):
...
@@ -29,14 +20,25 @@ class ErrorClipByValue(BaseErrorClipAttr):
...
@@ -29,14 +20,25 @@ class ErrorClipByValue(BaseErrorClipAttr):
self
.
max
=
max
self
.
max
=
max
self
.
min
=
min
self
.
min
=
min
def append_clip_op(self, block, grad_name):
    """Append a "clip" operator for `grad_name` into `block`.

    The op reads and writes the same variable (in-place style), clamping
    its values to the [self.min, self.max] range.
    """
    # Single in-place clip op: X -> Out with min/max attrs from this attr.
    clip_attrs = {"min": self.min, "max": self.max}
    block.append_op(
        type="clip",
        inputs={"X": grad_name},
        outputs={"Out": grad_name},
        attrs=clip_attrs)
def error_clip_callback(block, context):
    """Backward-construction callback that appends error-clip ops.

    Args:
        block: the block the latest grad op was just appended to.
        context: the grad_to_var map (grad var name -> forward var name).

    For each gradient output of the most recently appended op whose forward
    variable carries an `error_clip` attribute, append the corresponding
    clip op to `block`.
    """
    # The context is a grad_to_var map.
    grad_to_var = context
    # Inspect the op most recently appended to the block.
    op_desc = block.desc.op(block.desc.op_size() - 1)
    # BUGFIX: was `grad_to_var.has_key(n)` — dict.has_key() is Python-2 only
    # (removed in Python 3); `n in grad_to_var` is equivalent in both.
    for grad_n in filter(lambda n: n in grad_to_var,
                         op_desc.output_arg_names()):
        fwd_var = block.var_recursive(grad_to_var[grad_n])
        error_clip = getattr(fwd_var, "error_clip", None)
        if error_clip is not None:
            error_clip.append_clip_op(block, grad_n)
class
BaseGradientClipAttr
(
object
):
class
BaseGradientClipAttr
(
object
):
...
...
python/paddle/v2/fluid/framework.py
浏览文件 @
dea52631
...
@@ -147,6 +147,7 @@ class Variable(object):
...
@@ -147,6 +147,7 @@ class Variable(object):
dtype
=
None
,
dtype
=
None
,
lod_level
=
None
,
lod_level
=
None
,
persistable
=
None
,
persistable
=
None
,
error_clip
=
None
,
stop_gradient
=
False
,
stop_gradient
=
False
,
**
kwargs
):
**
kwargs
):
self
.
block
=
block
self
.
block
=
block
...
@@ -626,6 +627,17 @@ class Block(object):
...
@@ -626,6 +627,17 @@ class Block(object):
raise
ValueError
(
"var %s not in this block"
%
name
)
raise
ValueError
(
"var %s not in this block"
%
name
)
return
v
return
v
def var_recursive(self, name):
    """Find variable `name` in this block or, recursively, in its ancestors.

    Args:
        name(str): the variable's name.

    Returns:
        The Variable if found in this block or any ancestor block.

    Raises:
        ValueError: if the variable exists in neither this block nor any
            of its parent blocks.
    """
    if self.has_var(name):
        return self.var(name)
    if self.idx == 0:
        # Reached the root block without finding the variable.
        # BUGFIX: the original wrote `% name, self.idx` — `%` binds tighter
        # than the comma, so the two-placeholder format string received only
        # `name` and raised TypeError instead of the intended ValueError.
        raise ValueError("var %s is not in block(%d) nor its parents." %
                         (name, self.idx))
    parent_block = self.program.block(self.parent_idx)
    return parent_block.var_recursive(name)
def
all_parameters
(
self
):
def
all_parameters
(
self
):
return
list
(
self
.
iter_parameters
())
return
list
(
self
.
iter_parameters
())
...
@@ -744,6 +756,7 @@ class Block(object):
...
@@ -744,6 +756,7 @@ class Block(object):
optimize_attr
=
p
.
optimize_attr
,
optimize_attr
=
p
.
optimize_attr
,
regularizer
=
p
.
regularizer
,
regularizer
=
p
.
regularizer
,
clip_attr
=
p
.
clip_attr
,
clip_attr
=
p
.
clip_attr
,
error_clip
=
p
.
error_clip
,
name
=
v
.
name
)
name
=
v
.
name
)
self
.
vars
[
new_p
.
name
]
=
new_p
self
.
vars
[
new_p
.
name
]
=
new_p
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录