BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Unverified commit 948bc8b7, authored on Aug 05, 2020 by LielinJiang; committed via GitHub on Aug 05, 2020.
Add apply for Layer (#25812)
* add apply for Layer
Parent commit: 3dd2e380
Showing 2 changed files with 129 additions and 0 deletions:

python/paddle/fluid/dygraph/layers.py (+39, -0)
python/paddle/fluid/tests/unittests/test_imperative_layer_apply.py (+90, -0)
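In short, the commit adds a Layer.apply(fn) method: it walks the sublayers returned by .sublayers(), calls fn on each of them and finally on the layer itself, and returns self. A minimal usage sketch, adapted from the docstring added in this commit (the constant-initialization helper below is only illustrative):

import paddle
import paddle.nn as nn

paddle.enable_imperative()

net = nn.Sequential(nn.Linear(2, 2), nn.Linear(2, 2))

def init_weights(layer):
    # Only Linear sublayers are touched; their weights are reset to a constant.
    if type(layer) == nn.Linear:
        new_weight = paddle.fill_constant(
            layer.weight.shape, layer.weight.dtype, value=0.9)
        layer.weight.set_value(new_weight)

# fn is called on every sublayer and then on the container itself.
net.apply(init_weights)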
python/paddle/fluid/dygraph/layers.py
@@ -129,6 +129,45 @@ class Layer(core.Layer):
         for layer in self.sublayers():
             layer.eval()
 
+    def apply(self, fn):
+        """
+        Applies ``fn`` recursively to every sublayer (as returned by ``.sublayers()``)
+        as well as self. Typical use includes initializing the parameters of a model.
+
+        Parameters:
+            fn (function): a function to be applied to each sublayer
+
+        Returns:
+            Layer: self
+
+        Example::
+            .. code-block:: python
+
+              import paddle
+              import paddle.nn as nn
+
+              paddle.enable_imperative()
+
+              net = nn.Sequential(nn.Linear(2, 2), nn.Linear(2, 2))
+
+              def init_weights(layer):
+                  if type(layer) == nn.Linear:
+                      print('before init weight:', layer.weight.numpy())
+                      new_weight = paddle.fill_constant(layer.weight.shape, layer.weight.dtype, value=0.9)
+                      layer.weight.set_value(new_weight)
+                      print('after init weight:', layer.weight.numpy())
+
+              net.apply(init_weights)
+
+              print(net.state_dict())
+        """
+        for layer in self.sublayers():
+            layer.apply(fn)
+
+        fn(self)
+
+        return self
+
     def full_name(self):
         """Full name for this layer, composed by name_scope + "/" + MyLayer.__class__.__name__
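A small sketch of the visiting behaviour implied by the implementation above (the record helper and the visited list are illustrative only, not part of the commit): every sublayer is handled before the enclosing layer, and the method returns the layer itself, so the call can be chained.

import paddle.fluid as fluid
import paddle.nn as nn

with fluid.dygraph.guard():
    net = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 2))

    visited = []

    def record(layer):
        # Note the class name of each layer as apply() reaches it.
        visited.append(type(layer).__name__)

    same_net = net.apply(record)

    print(visited)            # ['Linear', 'Linear', 'Sequential']
    print(same_net is net)    # True: apply() returns self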
python/paddle/fluid/tests/unittests/test_imperative_layer_apply.py
new file mode 100644
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest

import paddle
import paddle.nn as nn
import paddle.fluid as fluid

import numpy as np


class LeNetDygraph(fluid.dygraph.Layer):
    def __init__(self, num_classes=10, classifier_activation='softmax'):
        super(LeNetDygraph, self).__init__()
        self.num_classes = num_classes
        self.features = nn.Sequential(
            nn.Conv2D(
                1, 6, 3, stride=1, padding=1),
            nn.ReLU(),
            nn.Pool2D(2, 'max', 2),
            nn.Conv2D(
                6, 16, 5, stride=1, padding=0),
            nn.ReLU(),
            nn.Pool2D(2, 'max', 2))

        if num_classes > 0:
            self.fc = nn.Sequential(
                nn.Linear(400, 120),
                nn.Linear(120, 84),
                nn.Linear(
                    84, 10, act=classifier_activation))

    def forward(self, inputs):
        x = self.features(inputs)

        if self.num_classes > 0:
            x = fluid.layers.flatten(x, 1)
            x = self.fc(x)
        return x


def init_weights(layer):
    if type(layer) == nn.Linear:
        new_weight = paddle.fill_constant(
            layer.weight.shape, layer.weight.dtype, value=0.9)
        layer.weight.set_value(new_weight)
        new_bias = paddle.fill_constant(
            layer.bias.shape, layer.bias.dtype, value=-0.1)
        layer.bias.set_value(new_bias)
    elif type(layer) == nn.Conv2D:
        new_weight = paddle.fill_constant(
            layer.weight.shape, layer.weight.dtype, value=0.7)
        layer.weight.set_value(new_weight)
        new_bias = paddle.fill_constant(
            layer.bias.shape, layer.bias.dtype, value=-0.2)
        layer.bias.set_value(new_bias)


class TestLayerApply(unittest.TestCase):
    def test_apply_init_weight(self):
        with fluid.dygraph.guard():
            net = LeNetDygraph()

            net.apply(init_weights)

            for layer in net.sublayers():
                if type(layer) == nn.Linear:
                    np.testing.assert_allclose(layer.weight.numpy(), 0.9)
                    np.testing.assert_allclose(layer.bias.numpy(), -0.1)
                elif type(layer) == nn.Conv2D:
                    np.testing.assert_allclose(layer.weight.numpy(), 0.7)
                    np.testing.assert_allclose(layer.bias.numpy(), -0.2)


if __name__ == '__main__':
    unittest.main()