Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
Crayon鑫
Paddle
提交
74523c41
P
Paddle
项目概览
Crayon鑫
/
Paddle
与 Fork 源项目一致
Fork自
PaddlePaddle / Paddle
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
74523c41
编写于
3月 09, 2018
作者:
C
chengduoZH
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
enhance regularizer.py
上级
0d49b921
变更
1
隐藏空白更改
内联
并排
Showing
1 changed file
with
35 additions
and
5 deletions
+35
-5
python/paddle/fluid/regularizer.py
python/paddle/fluid/regularizer.py
+35
-5
未找到文件。
python/paddle/fluid/regularizer.py
浏览文件 @
74523c41
...
@@ -13,6 +13,7 @@
...
@@ -13,6 +13,7 @@
# limitations under the License.
# limitations under the License.
import
framework
import
framework
from
.
import
core
__all__
=
[
__all__
=
[
'append_regularization_ops'
,
'append_regularization_ops'
,
...
@@ -46,9 +47,9 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
...
@@ -46,9 +47,9 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
regularization_term
=
None
regularization_term
=
None
if
param
.
regularizer
is
not
None
:
if
param
.
regularizer
is
not
None
:
# Add variable for regularization term in grad block
# Add variable for regularization term in grad block
regularization_term
=
param
.
regularizer
(
param
,
grad
.
block
)
regularization_term
=
param
.
regularizer
(
param
,
grad
,
grad
.
block
)
elif
regularization
is
not
None
:
elif
regularization
is
not
None
:
regularization_term
=
regularization
(
param
,
grad
.
block
)
regularization_term
=
regularization
(
param
,
grad
,
grad
.
block
)
# If no gradient or no regularization specified,
# If no gradient or no regularization specified,
# then we don't need to do anything
# then we don't need to do anything
...
@@ -82,7 +83,7 @@ class WeightDecayRegularizer(object):
...
@@ -82,7 +83,7 @@ class WeightDecayRegularizer(object):
def
__init__
(
self
):
def
__init__
(
self
):
pass
pass
def
__call__
(
self
,
param
,
block
):
def
__call__
(
self
,
param
,
grad
,
block
):
"""Add corresponding weight decay operations to the network
"""Add corresponding weight decay operations to the network
"""
"""
raise
NotImplementedError
()
raise
NotImplementedError
()
...
@@ -102,7 +103,7 @@ class L2DecayRegularizer(WeightDecayRegularizer):
...
@@ -102,7 +103,7 @@ class L2DecayRegularizer(WeightDecayRegularizer):
super
(
L2DecayRegularizer
,
self
).
__init__
()
super
(
L2DecayRegularizer
,
self
).
__init__
()
self
.
_regularization_coeff
=
regularization_coeff
self
.
_regularization_coeff
=
regularization_coeff
def
__call__
(
self
,
param
,
block
):
def
__call__
(
self
,
param
,
grad
,
block
):
"""Add L2 weight decay ops to network
"""Add L2 weight decay ops to network
Adds L2 weight decay ops.
Adds L2 weight decay ops.
...
@@ -117,8 +118,23 @@ class L2DecayRegularizer(WeightDecayRegularizer):
...
@@ -117,8 +118,23 @@ class L2DecayRegularizer(WeightDecayRegularizer):
"""
"""
assert
isinstance
(
param
,
framework
.
Parameter
)
assert
isinstance
(
param
,
framework
.
Parameter
)
assert
isinstance
(
block
,
framework
.
Block
)
assert
isinstance
(
block
,
framework
.
Block
)
decay
=
block
.
create_var
(
decay
=
block
.
create_var
(
dtype
=
"float32"
,
shape
=
param
.
shape
,
lod_level
=
param
.
lod_level
)
dtype
=
"float32"
,
shape
=
param
.
shape
,
lod_level
=
param
.
lod_level
)
if
grad
.
type
==
core
.
VarDesc
.
VarType
.
SELECTED_ROWS
:
decay
=
block
.
create_var
(
dtype
=
"float32"
,
shape
=
param
.
shape
,
type
=
core
.
VarDesc
.
VarType
.
SELECTED_ROWS
)
block
.
append_op
(
type
=
'lookup_table'
,
inputs
=
{
'W'
:
param
,
'Ids'
:
grad
},
outputs
=
{
'Out'
:
decay
},
attrs
=
{
'is_sparse'
:
True
})
param
=
decay
# Append Op to calculate decay
# Append Op to calculate decay
block
.
append_op
(
block
.
append_op
(
type
=
'scale'
,
type
=
'scale'
,
...
@@ -141,7 +157,7 @@ class L1DecayRegularizer(WeightDecayRegularizer):
...
@@ -141,7 +157,7 @@ class L1DecayRegularizer(WeightDecayRegularizer):
super
(
L1DecayRegularizer
,
self
).
__init__
()
super
(
L1DecayRegularizer
,
self
).
__init__
()
self
.
_regularization_coeff
=
regularization_coeff
self
.
_regularization_coeff
=
regularization_coeff
def
__call__
(
self
,
param
,
block
):
def
__call__
(
self
,
param
,
grad
,
block
):
"""Add L1 weight decay ops to network
"""Add L1 weight decay ops to network
Adds L1 weight decay ops.
Adds L1 weight decay ops.
...
@@ -158,6 +174,20 @@ class L1DecayRegularizer(WeightDecayRegularizer):
...
@@ -158,6 +174,20 @@ class L1DecayRegularizer(WeightDecayRegularizer):
assert
isinstance
(
block
,
framework
.
Block
)
assert
isinstance
(
block
,
framework
.
Block
)
decay
=
block
.
create_var
(
decay
=
block
.
create_var
(
dtype
=
"float32"
,
shape
=
param
.
shape
,
lod_level
=
param
.
lod_level
)
dtype
=
"float32"
,
shape
=
param
.
shape
,
lod_level
=
param
.
lod_level
)
if
grad
.
type
==
core
.
VarDesc
.
VarType
.
SELECTED_ROWS
:
# add concat_rows
decay
=
block
.
create_var
(
dtype
=
"float32"
,
shape
=
param
.
shape
,
type
=
core
.
VarDesc
.
VarType
.
SELECTED_ROWS
)
block
.
append_op
(
type
=
'lookup_table'
,
inputs
=
{
'W'
:
param
,
'Ids'
:
grad
},
outputs
=
{
'Out'
:
decay
},
attrs
=
{
'is_sparse'
:
True
})
# Append sign op
# Append sign op
block
.
append_op
(
block
.
append_op
(
type
=
'sign'
,
inputs
=
{
"X"
:
param
},
outputs
=
{
"Out"
:
decay
})
type
=
'sign'
,
inputs
=
{
"X"
:
param
},
outputs
=
{
"Out"
:
decay
})
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录