机器未来 / Paddle (forked from PaddlePaddle / Paddle)

Commit 8045fcfd (unverified)
Authored Jun 10, 2022 by Allen Guo, committed via GitHub on Jun 10, 2022

add unary ops (#773) (#43363)

* add unary ops
* move to activation_ops

Parent: 6aee6410
2 changed files with 329 additions and 6 deletions:

paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc   +96  -6
python/paddle/fluid/tests/unittests/ipu/test_unary_ops_ipu.py               +233  -0
paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc
@@ -27,26 +27,98 @@ Node *activation_op_handler(Graph *graph, Node *node, const std::string &type) {
   return new_node;
 }
 
-Node *relu_handler(Graph *graph, Node *node) {
-  return activation_op_handler(graph, node, "popart_relu");
-}
-
-Node *tanh_handler(Graph *graph, Node *node) {
-  return activation_op_handler(graph, node, "popart_tanh");
+Node *abs_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_abs");
+}
+
+Node *acos_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_acos");
+}
+
+Node *asin_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_asin");
+}
+
+Node *atan_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_atan");
+}
+
+Node *ceil_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_ceil");
+}
+
+Node *cos_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_cos");
+}
+
+Node *cosh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_cosh");
+}
+
+Node *erf_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_erf");
+}
+
+Node *exp_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_exp");
+}
+
+Node *floor_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_floor");
 }
 
 Node *log_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_log");
 }
 
+Node *reciprocal_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_reciprocal");
+}
+
+Node *relu_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_relu");
+}
+
+Node *round_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_round");
+}
+
 Node *sigmoid_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_sigmoid");
 }
 
+Node *sign_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sign");
+}
+
+Node *sin_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sin");
+}
+
+Node *sinh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sinh");
+}
+
+Node *softplus_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_softplus");
+}
+
+Node *softsign_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_softsign");
+}
+
 Node *sqrt_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_sqrt");
 }
 
+Node *tan_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_tan");
+}
+
+Node *tanh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_tanh");
+}
+
 Node *gelu_handler(Graph *graph, Node *node) {
   auto *op = node->Op();
   auto approximate_ = BOOST_GET_CONST(bool, op->GetAttr("approximate"));

@@ -93,10 +165,28 @@ Node *log_softmax_handler(Graph *graph, Node *node) {
 }  // namespace platform
 }  // namespace paddle
 
-REGISTER_HANDLER(relu, relu_handler);
-REGISTER_HANDLER(tanh, tanh_handler);
+REGISTER_HANDLER(abs, abs_handler);
+REGISTER_HANDLER(acos, acos_handler);
+REGISTER_HANDLER(asin, asin_handler);
+REGISTER_HANDLER(atan, atan_handler);
+REGISTER_HANDLER(ceil, ceil_handler);
+REGISTER_HANDLER(cos, cos_handler);
+REGISTER_HANDLER(cosh, cosh_handler);
+REGISTER_HANDLER(erf, erf_handler);
+REGISTER_HANDLER(exp, exp_handler);
+REGISTER_HANDLER(floor, floor_handler);
 REGISTER_HANDLER(log, log_handler);
+REGISTER_HANDLER(reciprocal, reciprocal_handler);
+REGISTER_HANDLER(relu, relu_handler);
+REGISTER_HANDLER(round, round_handler);
 REGISTER_HANDLER(sigmoid, sigmoid_handler);
+REGISTER_HANDLER(sign, sign_handler);
+REGISTER_HANDLER(sin, sin_handler);
+REGISTER_HANDLER(sinh, sinh_handler);
+REGISTER_HANDLER(softplus, softplus_handler);
+REGISTER_HANDLER(softsign, softsign_handler);
 REGISTER_HANDLER(sqrt, sqrt_handler);
+REGISTER_HANDLER(tan, tan_handler);
+REGISTER_HANDLER(tanh, tanh_handler);
 REGISTER_HANDLER(gelu, gelu_handler);
 REGISTER_HANDLER(log_softmax, log_softmax_handler);
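Each handler above simply forwards to activation_op_handler with the matching PopART op name ("popart_abs", "popart_acos", and so on) and is wired up through REGISTER_HANDLER, so every one of these Paddle unary ops is canonicalized to a single PopART node when a graph is lowered for IPU. As a rough illustration of the user-visible effect, the sketch below builds a tiny static graph with one of the newly supported ops and compiles it for IPU. This is a minimal sketch, not part of this commit, and it assumes the Paddle 2.3-era static-graph IPU API (IpuStrategy, IpuCompiledProgram, IPUPlace) on a build compiled with IPU support.

    # Minimal sketch (assumption: Paddle built with IPU support, 2.3-era IPU API).
    import numpy as np
    import paddle
    import paddle.static

    paddle.enable_static()

    main_prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, startup_prog):
        x = paddle.static.data(name='x', shape=[1, 3, 10, 10], dtype='float32')
        out = paddle.fluid.layers.cos(x)  # now canonicalized to popart_cos by cos_handler

    exe = paddle.static.Executor(paddle.IPUPlace())
    exe.run(startup_prog)

    # Compile the program for a single IPU in inference mode.
    ipu_strategy = paddle.static.IpuStrategy()
    ipu_strategy.set_graph_config(num_ipus=1, is_training=False)
    program = paddle.static.IpuCompiledProgram(
        main_prog, ipu_strategy=ipu_strategy).compile([x.name], [out.name])

    feed = {'x': np.random.uniform(size=[1, 3, 10, 10]).astype(np.float32)}
    res = exe.run(program, feed=feed, fetch_list=[out.name])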
python/paddle/fluid/tests/unittests/ipu/test_activation_x_op_ipu.py → python/paddle/fluid/tests/unittests/ipu/test_unary_ops_ipu.py
@@ -16,14 +16,13 @@ import unittest
 
 import numpy as np
 import paddle
-import paddle.nn.functional as F
 import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
 @unittest.skipIf(not paddle.is_compiled_with_ipu(),
                  "core is not compiled with IPU")
-class TestRelu(IPUOpTest):
+class TestBase(IPUOpTest):
 
     def setUp(self):
         self.set_atol()
@@ -33,7 +32,7 @@ class TestRelu(IPUOpTest):
         self.set_feed_attr()
 
     def set_test_op(self):
-        self.op = paddle.fluid.layers.relu
+        self.op = paddle.fluid.layers.abs
         self.op_attrs = {}
 
     def set_data_feed(self):
@@ -64,33 +63,171 @@ class TestRelu(IPUOpTest):
         self.check()
 
 
-class TestTanh(TestRelu):
+class TestAcos(TestBase):
+
+    @property
+    def fp16_enabled(self):
+        return False
+
+    def set_atol(self):
+        super().set_atol()
+        self.atol = 1e-6
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.acos
+        self.op_attrs = {}
+
+
+class TestAsin(TestAcos):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.asin
+        self.op_attrs = {}
+
+
+class TestSinh(TestAcos):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sinh
+        self.op_attrs = {}
+
+
+class TestAtan(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.atan
+        self.op_attrs = {}
+
+
+class TestCeil(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.ceil
+        self.op_attrs = {}
+
+
+class TestCos(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.cos
+        self.op_attrs = {}
+
+
+class TestCosh(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.cosh
+        self.op_attrs = {}
+
+
+class TestErf(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.erf
+        self.op_attrs = {}
+
+
+class TestExp(TestBase):
 
     def set_test_op(self):
-        self.op = F.tanh
+        self.op = paddle.fluid.layers.exp
         self.op_attrs = {}
 
 
-class TestLog(TestRelu):
+class TestFloor(TestBase):
+
+    @property
+    def fp16_enabled(self):
+        return False
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.floor
+        self.op_attrs = {}
+
+
+class TestLog(TestBase):
 
     def set_test_op(self):
         self.op = paddle.fluid.layers.log
         self.op_attrs = {}
 
 
-class TestSigmoid(TestRelu):
+class TestReciprocal(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.reciprocal
+        self.op_attrs = {}
+
+
+class TestRelu(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.relu
+        self.op_attrs = {}
+
+
+class TestRound(TestBase):
 
     def set_test_op(self):
-        self.op = F.sigmoid
+        self.op = paddle.fluid.layers.round
         self.op_attrs = {}
 
 
-class TestSqrt(TestRelu):
+class TestSigmoid(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sigmoid
+        self.op_attrs = {}
+
+
+class TestSign(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sign
+        self.op_attrs = {}
+
+
+class TestSin(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sin
+        self.op_attrs = {}
+
+
+class TestSoftplus(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.softplus
+        self.op_attrs = {}
+
+
+class TestSoftsign(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.softsign
+        self.op_attrs = {}
+
+
+class TestSqrt(TestBase):
 
     def set_test_op(self):
         self.op = paddle.fluid.layers.sqrt
         self.op_attrs = {}
 
 
+class TestTan(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.tan
+        self.op_attrs = {}
+
+
+class TestTanh(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.tanh
+        self.op_attrs = {}
+
+
 if __name__ == "__main__":
     unittest.main()
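The rewritten test file replaces the old hand-picked activation tests with a single TestBase plus one small subclass per unary op: a new op only needs to override set_test_op, and, where fp16 or tolerances are a problem, fp16_enabled or set_atol, as TestAcos does. A purely hypothetical example of that pattern (rsqrt is not handled by this commit; the class below is only an illustration of how a later test would slot in):

    # Hypothetical sketch: how a further unary op test would follow the TestBase
    # pattern above. rsqrt is NOT covered by this commit.
    class TestRsqrt(TestBase):

        def set_test_op(self):
            self.op = paddle.fluid.layers.rsqrt  # assumes a future rsqrt handler
            self.op_attrs = {}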