Unverified commit de46b159
Authored on Dec 05, 2019 by lilong12; committed by GitHub on Dec 05, 2019.
Unify the rank of prelu alpha to 4, corresponding to [N, C, H, W], except for the all mode
Parent: 932aca16
Changes: 2 files changed, with 7 additions and 6 deletions (+7, -6).
python/paddle/fluid/layers/nn.py (+1, -1)
python/paddle/fluid/tests/unittests/test_layers.py (+6, -5)
python/paddle/fluid/layers/nn.py

@@ -8693,7 +8693,7 @@ def prelu(x, mode, param_attr=None, name=None):
     if mode == 'channel':
         alpha_shape = [1, x.shape[1], 1, 1]
     elif mode == 'element':
-        alpha_shape = x.shape[1:]
+        alpha_shape = [1, x.shape[1], x.shape[2], x.shape[3]]
     dtype = helper.input_dtype(input_param_name='x')
     alpha = helper.create_parameter(
         attr=helper.param_attr,
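With this change the learnable alpha is rank 4 in both 'channel' mode ([1, C, 1, 1]) and 'element' mode (now [1, C, H, W] instead of x.shape[1:]), so it broadcasts over the batch dimension of an [N, C, H, W] input; 'all' mode remains the exception, with a single shared alpha. A minimal NumPy sketch of the resulting shapes; the prelu_alpha_shape helper is hypothetical, for illustration only, and not Paddle API:

```python
import numpy as np

def prelu_alpha_shape(x_shape, mode):
    # Hypothetical helper mirroring the branch above; not part of Paddle.
    if mode == 'channel':
        return [1, x_shape[1], 1, 1]                    # one alpha per channel
    elif mode == 'element':
        # After this commit: rank 4, [1, C, H, W], instead of x_shape[1:].
        return [1, x_shape[1], x_shape[2], x_shape[3]]
    return [1]                                          # 'all': one shared alpha

x = np.random.randn(2, 3, 4, 4).astype('float32')       # a small [N, C, H, W] input
for mode in ('channel', 'element', 'all'):
    alpha = np.full(prelu_alpha_shape(x.shape, mode), 0.25, dtype='float32')
    out = np.where(x > 0, x, alpha * x)                  # alpha broadcasts over N
    assert out.shape == x.shape
```

Under NumPy-style broadcasting a rank-3 (C, H, W) alpha would yield the same values; the point of pinning the parameter to rank 4 is that its shape then lines up dimension-for-dimension with [N, C, H, W].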
python/paddle/fluid/tests/unittests/test_layers.py

@@ -712,7 +712,7 @@ class TestLayer(LayerTest):
         self.assertTrue(
             np.array_equal(btp1.bias.numpy(), btp2.bias.numpy()))
 
-    def test_prelu(self):
+    def prelu_test(self, mode):
         inp_np = np.ones([5, 200, 100, 100]).astype('float32')
         with self.static_graph():
             data_t = layers.data(
@@ -720,7 +720,6 @@ class TestLayer(LayerTest):
                 shape=[5, 200, 100, 100],
                 dtype="float32",
                 append_batch_size=False)
-            mode = 'channel'
             out = layers.prelu(
                 data_t, mode, param_attr=ParamAttr(initializer=Constant(1.0)))
             static_rlt = self.get_static_graph_result(
@@ -732,7 +731,6 @@ class TestLayer(LayerTest):
                 shape=[5, 200, 100, 100],
                 dtype="float32",
                 append_batch_size=False)
-            mode = 'channel'
             prelu = nn.PRelu(
                 'prelu',
                 mode=mode,
@@ -742,7 +740,6 @@ class TestLayer(LayerTest):
                 feed={"input": inp_np}, fetch_list=[out])[0]
 
         with self.dynamic_graph():
-            mode = 'channel'
             prelu = nn.PRelu(
                 'prelu',
                 mode=mode,
@@ -756,7 +753,6 @@ class TestLayer(LayerTest):
         with self.dynamic_graph():
             inp_np = np.random.randn(5, 200, 100, 100).astype("float32")
             inp = base.to_variable(inp_np)
-            mode = 'channel'
             prelu1 = nn.PRelu(
                 'prelu1',
                 mode=mode,
@@ -779,6 +775,11 @@ class TestLayer(LayerTest):
         self.assertTrue(
             np.array_equal(prelu1.weight.numpy(), prelu2.weight.numpy()))
 
+    def test_prelu(self):
+        self.prelu_test("channel")
+        self.prelu_test("element")
+        self.prelu_test("all")
+
     def test_embeding(self):
         inp_word = np.array([[[1]]]).astype('int64')
         dict_size = 20
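The test refactor mirrors the code change: the old test_prelu hard-coded mode = 'channel' in four places, while the new prelu_test(self, mode) helper lets a small test_prelu driver exercise all three modes. A standalone sketch of that parameterization pattern, assuming a plain unittest.TestCase with a NumPy stand-in for Paddle's LayerTest fixtures:

```python
import unittest
import numpy as np

class TestPRelu(unittest.TestCase):
    def prelu_test(self, mode):
        # NumPy stand-in for the Paddle checks in the real test.
        x = np.ones([2, 3, 4, 4], dtype='float32')       # small [N, C, H, W] input
        alpha_shape = {'channel': (1, 3, 1, 1),
                       'element': (1, 3, 4, 4),
                       'all': (1,)}[mode]
        alpha = np.full(alpha_shape, 0.25, dtype='float32')
        out = np.where(x > 0, x, alpha * x)
        self.assertEqual(out.shape, x.shape)

    def test_prelu(self):
        # One entry point drives every supported mode, as in the commit.
        for mode in ('channel', 'element', 'all'):
            self.prelu_test(mode)

if __name__ == '__main__':
    unittest.main()
```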