Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle)

Commit ea91ca2f (unverified)
Authored Jul 27, 2022 by Zhong Hui; committed by GitHub on Jul 27, 2022
[Eager] Add hierarchical_sigmoid yaml (#44638)
Parent: ae25ab56
Showing 5 changed files with 68 additions and 10 deletions.
paddle/phi/api/yaml/generator/api_base.py (+1, -1)
paddle/phi/api/yaml/legacy_api.yaml (+12, -0)
paddle/phi/api/yaml/legacy_backward.yaml (+12, -1)
python/paddle/fluid/tests/unittests/test_hsigmoid_op.py (+38, -7)
python/paddle/nn/functional/loss.py (+5, -1)
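Taken together, these changes register hierarchical_sigmoid in the eager ("final state") yaml pipeline: the op and its gradient are declared in the yaml files, the Python entry point gains a dygraph fast path, and the unit tests gain eager checks. A minimal sketch of the user-facing call, mirroring the docstring example in python/paddle/nn/functional/loss.py (random inputs, so the printed losses vary run to run):

# Minimal eager-mode sketch of the API this commit wires up, following the
# hsigmoid_loss docstring example in python/paddle/nn/functional/loss.py.
import paddle
import paddle.nn.functional as F

paddle.set_device('cpu')

input = paddle.uniform([4, 3])
label = paddle.to_tensor([0, 1, 4, 5])
num_classes = 5
weight = paddle.uniform([num_classes - 1, input.shape[1]])
out = F.hsigmoid_loss(input, label, num_classes, weight)
print(out)  # shape [4, 1]; the docstring shows values like 2.11009121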
paddle/phi/api/yaml/generator/api_base.py

@@ -135,7 +135,7 @@ class BaseAPI(object):
             'double': 'double',
             'bool': 'bool',
             'str': 'const std::string&',
-            'str[] ': 'const std::vector<std::string>&',
+            'str[]': 'const std::vector<std::string>&',
             'Place': 'const Place&',
             'DataLayout': 'DataLayout',
             'DataType': 'DataType',
...
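The one-character fix above removes a stray trailing space from the 'str[]' key of the generator's attribute-type map; with the space present, yaml attributes declared as str[] (such as epmap and table_names in the new hierarchical_sigmoid entry) would never match the key. A hedged illustration of the lookup behavior, not the actual generator code:

# Hedged illustration of why the trailing space mattered; this is only the
# dict-lookup behavior the code generator depends on.
attr_types_map = {
    'str': 'const std::string&',
    'str[] ': 'const std::vector<std::string>&',  # buggy key: trailing space
}

print(attr_types_map.get('str[]'))   # None: 'str[]' attrs fail to resolve

# Effect of the fix: the key no longer carries the trailing space.
attr_types_map['str[]'] = attr_types_map.pop('str[] ')
print(attr_types_map.get('str[]'))   # const std::vector<std::string>&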
paddle/phi/api/yaml/legacy_api.yaml

@@ -1038,6 +1038,18 @@
     func : hard_swish
   backward : hard_swish_grad

+# hierarchical_sigmoid
+- api : hierarchical_sigmoid
+  args : (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse)
+  output : Tensor(out), Tensor(pre_out), Tensor(w_out)
+  infer_meta :
+    func : HierarchicalSigmoidInferMeta
+  optional : path, code, bias
+  kernel :
+    func : hierarchical_sigmoid
+    data_type : x
+  backward : hierarchical_sigmoid_grad
+
 # histogram
 - api : histogram
   args : (Tensor x, int64_t bins, int min, int max)
...
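In this entry, path, code and bias are declared optional, the kernel dtype follows x (data_type : x), and only out of the three outputs is user-facing (pre_out and w_out are intermediates kept for the backward pass). A hedged sketch of the optional inputs in use with a custom Huffman tree; the path_table/path_code rows follow the unit tests' 6-class tree, other shapes and values are illustrative:

# Hedged sketch: passing the optional path/code inputs as a custom tree,
# with bias left as None. Rows follow the 6-class tree in the unit tests.
import paddle
import paddle.nn.functional as F

x = paddle.uniform([4, 8])
label = paddle.to_tensor([1, 2, 5, 5], dtype='int64')
path_table = paddle.to_tensor([[0, 2, -1, -1, -1], [0, 1, 3, -1, -1],
                               [0, 1, 4, -1, -1], [0, 2, -1, -1, -1]],
                              dtype='int64')
path_code = paddle.to_tensor([[0, 0, -1, -1, -1], [1, 1, 1, -1, -1],
                              [1, 0, 0, -1, -1], [0, 1, -1, -1, -1]],
                             dtype='int64')
weight = paddle.uniform([5, 8])  # rows indexed by path_table entries
out = F.hsigmoid_loss(x, label, 6, weight, bias=None,
                      path_table=path_table, path_code=path_code)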
paddle/phi/api/yaml/legacy_backward.yaml

@@ -935,6 +935,17 @@
     func : hard_swish_grad
   inplace : (out_grad -> x_grad)

+- backward_api : hierarchical_sigmoid_grad
+  forward : hierarchical_sigmoid (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse) -> Tensor(out), Tensor(pre_out), Tensor(w_out)
+  args : (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, Tensor pre_out, Tensor out_grad, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse)
+  output : Tensor(x_grad), Tensor(w_grad), Tensor(bias_grad)
+  infer_meta :
+    func : GeneralTernaryGradInferMeta
+    param : [x, w, bias]
+  optional : path, code, bias
+  kernel :
+    func : hierarchical_sigmoid_grad
+
 - backward_api : huber_loss_grad
   forward : huber_loss (Tensor input, Tensor label, float delta) -> Tensor(out), Tensor(residual)
   args : (Tensor residual, Tensor out_grad, float delta)
...

@@ -2396,7 +2407,7 @@
   backward : unsqueeze_double_grad

 - backward_api : warpctc_grad
   forward : warpctc (Tensor logits, Tensor label, Tensor logits_length, Tensor labels_length, int blank, bool norm_by_times) -> Tensor(loss), Tensor(warpctcgrad)
   args : (Tensor logits, Tensor logits_length, Tensor warpctcgrad, Tensor loss_grad, int blank, bool norm_by_times)
   output : Tensor(logits_grad)
   infer_meta :
...
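GeneralTernaryGradInferMeta with param : [x, w, bias] shapes the three gradient outputs after the corresponding forward inputs. A hedged eager-mode check of that wiring (shapes follow the docstring example, with a bias added so all three gradients exist):

# Hedged check that the grads come out shaped like x, w and bias, as the
# GeneralTernaryGradInferMeta param list above prescribes.
import paddle
import paddle.nn.functional as F

x = paddle.uniform([4, 3])
w = paddle.uniform([4, 3])   # (num_classes - 1, feature_size), num_classes=5
b = paddle.uniform([4, 1])
for t in (x, w, b):
    t.stop_gradient = False
label = paddle.to_tensor([0, 1, 4, 3], dtype='int64')

loss = F.hsigmoid_loss(x, label, 5, w, bias=b)
loss.sum().backward()
print(x.grad.shape, w.grad.shape, b.grad.shape)  # [4, 3] [4, 3] [4, 1]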
python/paddle/fluid/tests/unittests/test_hsigmoid_op.py

@@ -172,10 +172,30 @@ def hsigmoidWithCustomTree(x, w, path_table, path_code, label, bias,
     return pre_output, out


+def python_api(input,
+               weight,
+               label,
+               path_table=None,
+               path_code=None,
+               bias=None,
+               num_classes=-1,
+               is_sparse=False,
+               remote_prefetch=False):
+    assert is_sparse == remote_prefetch, "is_sparse is equal to remote_prefetch in dygraph."
+    return paddle.nn.functional.hsigmoid_loss(input, label, num_classes,
+                                              weight, bias, path_table,
+                                              path_code, is_sparse)
+
+
+python_out_sig = ["Out"]
+
+
 class TestHSigmoidOp(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 101
         feature_size = 5
         batch_size = 20
...

@@ -193,11 +213,12 @@ class TestHSigmoidOp(OpTest):
         self.user_grads = hsigmoid_grad(x, w, label, bias, num_classes)

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
         self.check_grad(['X', 'W', 'Bias'], ['Out'],
-                        user_defined_grads=self.user_grads)
+                        user_defined_grads=self.user_grads,
+                        check_eager=True)


 @skip_check_grad_ci(
...

@@ -208,6 +229,8 @@ class TestHSigmoidOpSparse(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
         feature_size = 8
         batch_size = 4
...

@@ -237,7 +260,7 @@ class TestHSigmoidOpSparse(OpTest):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)


 class TestHSigmoidOpWithSparseGrad(unittest.TestCase):
...

@@ -318,6 +341,8 @@ class TestHSigmoidOpWithCostumTree(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
         feature_size = 8
         batch_size = 4
...

@@ -347,10 +372,12 @@ class TestHSigmoidOpWithCostumTree(OpTest):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
-        self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))
+        self.check_grad(['Bias', 'X', 'W'], ['Out'],
+                        no_grad_set=set('Label'),
+                        check_eager=True)


 @skip_check_grad_ci(
...

@@ -361,6 +388,8 @@ class TestHSigmoidOpWithCostumTreeWithoutBias(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
         feature_size = 8
         batch_size = 4
...

@@ -394,10 +423,12 @@ class TestHSigmoidOpWithCostumTreeWithoutBias(OpTest):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
-        self.check_grad(['X', 'W'], ['Out'], no_grad_set=set('Label'))
+        self.check_grad(['X', 'W'], ['Out'],
+                        no_grad_set=set('Label'),
+                        check_eager=True)


 class TestHSigmoidLossAPI(unittest.TestCase):
...
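The new python_api shim re-orders OpTest's operator-style arguments into the signature of paddle.nn.functional.hsigmoid_loss so that check_eager=True can replay each test case through the eager API, and python_out_sig = ["Out"] restricts the comparison to the user-facing output (PreOut and W_Out are intermediates). A hedged standalone call of the shim; shapes follow TestHSigmoidOp (batch_size=20, feature_size=5, num_classes=101):

# Hedged standalone use of the shim above, with test-sized random inputs.
import paddle

x = paddle.uniform([20, 5])
w = paddle.uniform([100, 5])    # (num_classes - 1, feature_size)
label = paddle.randint(0, 101, shape=[20, 1], dtype='int64')
bias = paddle.uniform([100, 1])

out = python_api(x, w, label, bias=bias, num_classes=101)
print(out.shape)  # [20, 1]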
python/paddle/nn/functional/loss.py

@@ -920,7 +920,11 @@ def hsigmoid_loss(input,
             #  [2.11009121]
             #  [1.92374969]]
     """
-
+    if in_dygraph_mode():
+        out, _, _ = _C_ops.final_state_hierarchical_sigmoid(
+            input, weight, label, path_table, path_code, bias, num_classes,
+            is_sparse, 0, [], [], [], is_sparse)
+        return out
     if _non_static_mode():
         out, _, _ = _C_ops.hierarchical_sigmoid(input, weight, label,
                                                 path_table, path_code, bias,
...
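Note how the eager branch fills the distributed-training attributes of the yaml signature with inert values: trainer_id is 0, height_sections/epmap/table_names are empty, and remote_prefetch receives the value of is_sparse, consistent with the test file's assertion that the two coincide in dygraph. A hedged restatement of that argument mapping (assumes _C_ops is in scope, as in the diff):

# Hedged restatement of the eager branch's argument order; the comments name
# the corresponding yaml args of hierarchical_sigmoid.
def call_final_state(input, weight, label, path_table, path_code, bias,
                     num_classes, is_sparse):
    out, _, _ = _C_ops.final_state_hierarchical_sigmoid(
        input, weight, label,         # x, w, label
        path_table, path_code, bias,  # path, code, bias (optional inputs)
        num_classes,
        is_sparse,                    # remote_prefetch := is_sparse in dygraph
        0,                            # trainer_id (unused in dygraph)
        [], [], [],                   # height_sections, epmap, table_names
        is_sparse)
    return out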