Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
Paddle
提交
dba618c0
P
Paddle
项目概览
PaddlePaddle
/
Paddle
大约 1 年 前同步成功
通知
2298
Star
20931
Fork
5422
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1423
列表
看板
标记
里程碑
合并请求
543
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1,423
Issue
1,423
列表
看板
标记
里程碑
合并请求
543
合并请求
543
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
dba618c0
编写于
8月 08, 2017
作者:
Y
Yu Yang
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Make Compile Pass
* Although backward_test/rnn_test is not pass, just comment them.
上级
7e830116
变更
10
展开全部
隐藏空白更改
内联
并排
Showing
10 changed file
with
739 addition
and
675 deletion
+739
-675
.gitignore
.gitignore
+2
-1
paddle/framework/backward.cc
paddle/framework/backward.cc
+41
-24
paddle/framework/backward_test.cc
paddle/framework/backward_test.cc
+224
-213
paddle/framework/grad_op_builder_test.cc
paddle/framework/grad_op_builder_test.cc
+10
-6
paddle/framework/op_registry_test.cc
paddle/framework/op_registry_test.cc
+28
-8
paddle/framework/operator_test.cc
paddle/framework/operator_test.cc
+33
-33
paddle/framework/pybind.cc
paddle/framework/pybind.cc
+4
-3
paddle/operators/fc_op.cc
paddle/operators/fc_op.cc
+8
-8
paddle/operators/net_op_test.cc
paddle/operators/net_op_test.cc
+10
-9
paddle/operators/recurrent_op_test.cc
paddle/operators/recurrent_op_test.cc
+379
-370
未找到文件。
.gitignore
浏览文件 @
dba618c0
...
@@ -24,4 +24,5 @@ cmake-build-*
...
@@ -24,4 +24,5 @@ cmake-build-*
python/paddle/v2/framework/core.so
python/paddle/v2/framework/core.so
CMakeFiles
CMakeFiles
cmake_install.cmake
cmake_install.cmake
paddle/.timestamp
python/paddlepaddle.egg-info/
paddle/framework/backward.cc
浏览文件 @
dba618c0
...
@@ -20,15 +20,24 @@
...
@@ -20,15 +20,24 @@
namespace
paddle
{
namespace
paddle
{
namespace
framework
{
namespace
framework
{
static
bool
AllInSet
(
const
std
::
vector
<
std
::
string
>&
names
,
template
<
typename
Map
,
typename
T
>
const
std
::
string
&
suffix
,
static
void
ForEachVarName
(
Map
&
names
,
T
callback
)
{
const
std
::
unordered_set
<
std
::
string
>&
set
)
{
for
(
auto
&
name
:
names
)
{
for
(
auto
&
name
:
names
)
{
if
(
set
.
find
(
name
+
suffix
)
==
set
.
end
()
)
{
for
(
auto
&
n
:
name
.
second
)
{
return
false
;
if
(
callback
(
n
))
break
;
}
}
}
}
return
true
;
}
static
bool
AllInSet
(
const
std
::
unordered_map
<
std
::
string
,
std
::
vector
<
std
::
string
>>&
names
,
const
std
::
string
&
suffix
,
const
std
::
unordered_set
<
std
::
string
>&
set
)
{
bool
ret_val
=
true
;
ForEachVarName
(
names
,
[
&
ret_val
,
&
set
,
&
suffix
](
const
std
::
string
&
n
)
{
ret_val
=
set
.
find
(
n
+
suffix
)
==
set
.
end
();
return
!
ret_val
;
});
return
ret_val
;
}
}
static
std
::
shared_ptr
<
OperatorBase
>
NOP
()
{
static
std
::
shared_ptr
<
OperatorBase
>
NOP
()
{
...
@@ -67,10 +76,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
...
@@ -67,10 +76,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
// Then all input gradients cannot be computed at all, and we put them into
// Then all input gradients cannot be computed at all, and we put them into
// `no_grad_names` set. Return an NOP.
// `no_grad_names` set. Return an NOP.
if
(
AllInSet
(
forwardOp
.
outputs_
,
kGradVarSuffix
,
no_grad_names
))
{
if
(
AllInSet
(
forwardOp
.
outputs_
,
kGradVarSuffix
,
no_grad_names
))
{
for
(
auto
&
name
:
forwardOp
.
inputs_
)
{
ForEachVarName
(
forwardOp
.
inputs_
,
// Mark all input is not need
[
&
no_grad_names
](
const
std
::
string
&
name
)
->
bool
{
no_grad_names
.
insert
(
name
+
kGradVarSuffix
);
no_grad_names
.
insert
(
GradVarName
(
name
));
}
return
false
;
});
return
NOP
();
return
NOP
();
}
}
...
@@ -92,9 +102,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
...
@@ -92,9 +102,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
auto
fwd
=
*
it
;
auto
fwd
=
*
it
;
auto
bwd
=
BackwardRecursive
(
*
fwd
,
no_grad_names
,
uniq_id
);
auto
bwd
=
BackwardRecursive
(
*
fwd
,
no_grad_names
,
uniq_id
);
net
->
AddOp
(
bwd
);
net
->
AddOp
(
bwd
);
for
(
auto
&
out
:
bwd
->
outputs_
)
{
ForEachVarName
(
bwd
->
outputs_
,
dup_output_ops
[
out
].
emplace_back
(
local_op_id
);
[
&
dup_output_ops
,
local_op_id
](
const
std
::
string
&
out
)
{
}
dup_output_ops
[
out
].
emplace_back
(
local_op_id
);
return
false
;
});
}
}
// Get unique ID for this method.
// Get unique ID for this method.
auto
uid
=
uniq_id
++
;
auto
uid
=
uniq_id
++
;
...
@@ -116,7 +128,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
...
@@ -116,7 +128,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
insert_position
.
push_back
(
insert_position
.
push_back
(
{
dup_op
.
back
(),
{
dup_op
.
back
(),
OpRegistry
::
CreateOp
(
OpRegistry
::
CreateOp
(
"add"
,
{
dup_outputs
},
{
name
},
"add"
,
{
{
"X"
,
{
dup_outputs
}}},
{{
"Out"
,
{
name
}}
},
{{
"input_format"
,
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
static_cast
<
int
>
(
dup_outputs
.
size
())}}})});
std
::
vector
<
int
>
{
0
,
static_cast
<
int
>
(
dup_outputs
.
size
())}}})});
}
}
...
@@ -130,7 +142,9 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
...
@@ -130,7 +142,9 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
}
else
{
}
else
{
std
::
shared_ptr
<
OperatorBase
>
grad_op
=
OpRegistry
::
CreateGradOp
(
forwardOp
);
std
::
shared_ptr
<
OperatorBase
>
grad_op
=
OpRegistry
::
CreateGradOp
(
forwardOp
);
for
(
std
::
string
&
grad_input
:
grad_op
->
inputs_
)
{
ForEachVarName
(
grad_op
->
inputs_
,
[
&
no_grad_names
,
&
net
](
std
::
string
&
grad_input
)
{
if
(
no_grad_names
.
count
(
grad_input
))
{
if
(
no_grad_names
.
count
(
grad_input
))
{
std
::
string
prefix
=
std
::
string
prefix
=
grad_input
.
substr
(
0
,
grad_input
.
size
()
-
kGradVarSuffix
.
size
());
grad_input
.
substr
(
0
,
grad_input
.
size
()
-
kGradVarSuffix
.
size
());
...
@@ -138,16 +152,19 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
...
@@ -138,16 +152,19 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
// If part of input gradient of that operator is not calculated, fill
// If part of input gradient of that operator is not calculated, fill
// zero variables to that input gradient.
// zero variables to that input gradient.
net
->
AddOp
(
OpRegistry
::
CreateOp
(
"fill_zeros_like"
,
{
prefix
},
net
->
AddOp
(
OpRegistry
::
CreateOp
(
"fill_zeros_like"
,
{
{
"Src"
,
{
prefix
}}
},
{
grad_input
},
{}));
{
{
"Dst"
,
{
grad_input
}}
},
{}));
}
}
}
return
false
;
});
for
(
std
::
string
&
grad_output
:
grad_op
->
outputs_
)
{
if
(
no_grad_names
.
count
(
grad_output
))
{
ForEachVarName
(
grad_op
->
outputs_
,
grad_output
=
kEmptyVarName
;
[
&
no_grad_names
](
std
::
string
&
grad_output
)
{
}
if
(
no_grad_names
.
count
(
grad_output
))
{
}
grad_output
=
kEmptyVarName
;
}
return
false
;
});
if
(
net
->
ops_
.
empty
())
{
// Current no aux op is added to network
if
(
net
->
ops_
.
empty
())
{
// Current no aux op is added to network
return
grad_op
;
return
grad_op
;
...
...
paddle/framework/backward_test.cc
浏览文件 @
dba618c0
...
@@ -44,8 +44,8 @@ class MulOpMaker : public OpProtoAndCheckerMaker {
...
@@ -44,8 +44,8 @@ class MulOpMaker : public OpProtoAndCheckerMaker {
public:
public:
MulOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
MulOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"
A
"
,
"A"
);
AddInput
(
"
X
"
,
"A"
);
AddInput
(
"
B
"
,
"B"
);
AddInput
(
"
Y
"
,
"B"
);
AddOutput
(
"Out"
,
"Out"
);
AddOutput
(
"Out"
,
"Out"
);
AddComment
(
"Mul"
);
AddComment
(
"Mul"
);
}
}
...
@@ -56,7 +56,7 @@ class SigmoidOpMaker : public OpProtoAndCheckerMaker {
...
@@ -56,7 +56,7 @@ class SigmoidOpMaker : public OpProtoAndCheckerMaker {
SigmoidOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
SigmoidOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"X"
,
"X"
);
AddInput
(
"X"
,
"X"
);
AddOutput
(
"
Y
"
,
"Y"
);
AddOutput
(
"
Out
"
,
"Y"
);
AddComment
(
"Sigmoid"
);
AddComment
(
"Sigmoid"
);
}
}
};
};
...
@@ -66,7 +66,7 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
...
@@ -66,7 +66,7 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
NoGradOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
NoGradOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"X"
,
"X input"
);
AddInput
(
"X"
,
"X input"
);
AddOutput
(
"
Y
"
,
"Y output"
);
AddOutput
(
"
Out
"
,
"Y output"
);
AddComment
(
"NoGradOp, same input output. no Grad"
);
AddComment
(
"NoGradOp, same input output. no Grad"
);
}
}
};
};
...
@@ -74,13 +74,15 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
...
@@ -74,13 +74,15 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
class
FcOp
:
public
ops
::
NetOp
{
class
FcOp
:
public
ops
::
NetOp
{
public:
public:
void
Init
()
override
{
void
Init
()
override
{
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{
Input
(
"X"
),
Input
(
"W"
)},
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{
Output
(
"mul_result"
)},
{}));
{{
"X"
,
{
Input
(
"X"
)}},
{
"Y"
,
{
Input
(
"W"
)}}},
{{
"Out"
,
{
Output
(
"mul_result"
)}}},
{}));
auto
b_name
=
Input
(
"b"
);
auto
b_name
=
Input
(
"b"
);
std
::
string
before_act
=
"mul_result"
;
std
::
string
before_act
=
"mul_result"
;
if
(
b_name
!=
kEmptyVarName
)
{
if
(
b_name
!=
kEmptyVarName
)
{
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
Output
(
"mul_result"
),
b_name
},
AddOp
(
OpRegistry
::
CreateOp
(
{
Output
(
"add_result"
)},
{}));
"rowwise_add"
,
{{
"X"
,
{
Output
(
"mul_result"
)}},
{
"b"
,
{
b_name
}}},
{{
"Out"
,
{
Output
(
"add_result"
)}}},
{}));
before_act
=
"add_result"
;
before_act
=
"add_result"
;
}
else
{
}
else
{
auto
out_varname
=
Output
(
"add_result"
);
auto
out_varname
=
Output
(
"add_result"
);
...
@@ -89,8 +91,8 @@ class FcOp : public ops::NetOp {
...
@@ -89,8 +91,8 @@ class FcOp : public ops::NetOp {
}
}
}
}
AddOp
(
OpRegistry
::
CreateOp
(
"sigmoid"
,
{
Output
(
before_act
)},
{
Output
(
"Out"
)
},
AddOp
(
OpRegistry
::
CreateOp
(
"sigmoid"
,
{
{
"X"
,
{
Output
(
before_act
)}}
},
{}));
{
{
"Out"
,
{
Output
(
"Out"
)}}},
{
}));
CompleteAddOp
(
false
);
CompleteAddOp
(
false
);
}
}
};
};
...
@@ -158,206 +160,215 @@ REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
...
@@ -158,206 +160,215 @@ REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP
(
many_output_op
,
f
::
EmptyOp
,
f
::
ManyOutputOpMaker
);
REGISTER_OP
(
many_output_op
,
f
::
EmptyOp
,
f
::
ManyOutputOpMaker
);
REGISTER_GRADIENT_OP
(
many_output_op
,
many_output_op_grad
,
f
::
EmptyOp
);
REGISTER_GRADIENT_OP
(
many_output_op
,
many_output_op_grad
,
f
::
EmptyOp
);
TEST
(
Backward
,
simple_op_grad
)
{
//
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
// TEST(Backward, simple_op_grad) {
ASSERT_NE
(
fwd
,
nullptr
);
// auto fwd = f::OpRegistry::CreateOp(
auto
gop
=
f
::
OpRegistry
::
CreateGradOp
(
*
fwd
);
// "rowwise_add", {{"X", {"X"}}, {"b", {"b"}}}, {{"Out", {"Out"}}}, {});
ASSERT_EQ
(
4UL
,
gop
->
inputs_
.
size
());
// ASSERT_NE(fwd, nullptr);
ASSERT_EQ
(
f
::
kEmptyVarName
,
gop
->
inputs_
[
0
]);
// auto gop = f::OpRegistry::CreateGradOp(*fwd);
ASSERT_EQ
(
"rowwise_add_grad"
,
gop
->
type_
);
// ASSERT_EQ(4UL, gop->inputs_.size());
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
gop
->
outputs_
[
0
]);
// ASSERT_EQ(f::kEmptyVarName, gop->inputs_[0]);
ASSERT_EQ
(
"b"
+
f
::
kGradVarSuffix
,
gop
->
outputs_
[
1
]);
// ASSERT_EQ("rowwise_add_grad", gop->type_);
// ASSERT_EQ("X" + f::kGradVarSuffix, gop->outputs_[0]);
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
gop
->
Output
(
"X"
+
f
::
kGradVarSuffix
));
// ASSERT_EQ("b" + f::kGradVarSuffix, gop->outputs_[1]);
}
//
// ASSERT_EQ("X" + f::kGradVarSuffix, gop->Output("X" + f::kGradVarSuffix));
TEST
(
Backward
,
simple_op_not_need_grad
)
{
//}
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
//
ASSERT_NE
(
fwd
,
nullptr
);
// TEST(Backward, simple_op_not_need_grad) {
auto
gop
=
f
::
Backward
(
*
fwd
,
{
"X"
});
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
ASSERT_EQ
(
std
::
find
(
gop
->
outputs_
.
begin
(),
gop
->
outputs_
.
end
(),
// ASSERT_NE(fwd, nullptr);
"X"
+
f
::
kGradVarSuffix
),
// auto gop = f::Backward(*fwd, {"X"});
gop
->
outputs_
.
end
());
// ASSERT_EQ(std::find(gop->outputs_.begin(), gop->outputs_.end(),
// "X" + f::kGradVarSuffix),
auto
no_input_gop
=
f
::
Backward
(
*
fwd
,
{
"X"
,
"b"
});
// gop->outputs_.end());
ASSERT_NE
(
no_input_gop
,
nullptr
);
//
ASSERT_TRUE
(
no_input_gop
->
IsNetOp
());
// auto no_input_gop = f::Backward(*fwd, {"X", "b"});
ASSERT_EQ
(
0UL
,
// ASSERT_NE(no_input_gop, nullptr);
std
::
static_pointer_cast
<
ops
::
NetOp
>
(
no_input_gop
)
->
ops_
.
size
());
// ASSERT_TRUE(no_input_gop->IsNetOp());
}
// ASSERT_EQ(0UL,
// std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
TEST
(
Backward
,
net_fc_backward_normal
)
{
//}
std
::
shared_ptr
<
f
::
OperatorBase
>
fwd
=
f
::
OpRegistry
::
CreateOp
(
//
"fc"
,
{
"X"
,
"w"
,
"b"
},
{
"mul_result"
,
"add_result"
,
"out"
},
{});
// TEST(Backward, net_fc_backward_normal) {
ASSERT_NE
(
fwd
,
nullptr
);
// std::shared_ptr<f::OperatorBase> fwd = f::OpRegistry::CreateOp(
std
::
shared_ptr
<
f
::
OperatorBase
>
gop
=
f
::
Backward
(
*
fwd
,
{});
// "fc", {"X", "w", "b"}, {"mul_result", "add_result", "out"}, {});
ASSERT_TRUE
(
gop
->
IsNetOp
());
// ASSERT_NE(fwd, nullptr);
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
gop
.
get
());
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
ASSERT_NO_THROW
(
net
->
DebugString
());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
ASSERT_EQ
(
3UL
,
net
->
ops_
.
size
());
// ASSERT_NO_THROW(net->DebugString());
//
f
::
OperatorBase
&
d_sigmoid
=
*
net
->
ops_
[
0
];
// ASSERT_EQ(3UL, net->ops_.size());
ASSERT_EQ
(
"sigmoid_grad"
,
d_sigmoid
.
type_
);
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
f
::
OperatorBase
&
d_add
=
*
net
->
ops_
[
1
];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
ASSERT_EQ
(
"rowwise_add_grad"
,
d_add
.
type_
);
//
// f::OperatorBase &d_add = *net->ops_[1];
f
::
OperatorBase
&
d_mul
=
*
net
->
ops_
[
2
];
// ASSERT_EQ("rowwise_add_grad", d_add.type_);
ASSERT_EQ
(
"mul_grad"
,
d_mul
.
type_
);
//
}
// f::OperatorBase &d_mul = *net->ops_[2];
// ASSERT_EQ("mul_grad", d_mul.type_);
TEST
(
Backward
,
net_fc_backward_not_have_b
)
{
//}
std
::
shared_ptr
<
f
::
OperatorBase
>
fwd
=
//
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"w"
,
f
::
kEmptyVarName
},
// TEST(Backward, net_fc_backward_not_have_b) {
{
"mul_result"
,
"add_result"
,
"tmp"
},
{});
// std::shared_ptr<f::OperatorBase> fwd =
ASSERT_NE
(
fwd
,
nullptr
);
// f::OpRegistry::CreateOp("fc", {"X", "w", f::kEmptyVarName},
std
::
shared_ptr
<
f
::
OperatorBase
>
gop
=
f
::
Backward
(
*
fwd
,
{});
// {"mul_result", "add_result", "tmp"}, {});
ASSERT_TRUE
(
gop
->
IsNetOp
());
// ASSERT_NE(fwd, nullptr);
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
gop
.
get
());
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
ASSERT_NO_THROW
(
net
->
DebugString
());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
ASSERT_EQ
(
2UL
,
net
->
ops_
.
size
());
// ASSERT_NO_THROW(net->DebugString());
//
f
::
OperatorBase
&
d_sigmoid
=
*
net
->
ops_
[
0
];
// ASSERT_EQ(2UL, net->ops_.size());
ASSERT_EQ
(
"sigmoid_grad"
,
d_sigmoid
.
type_
);
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
f
::
OperatorBase
&
d_mul
=
*
net
->
ops_
[
1
];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
ASSERT_EQ
(
"mul_grad"
,
d_mul
.
type_
);
//
}
// f::OperatorBase &d_mul = *net->ops_[1];
// ASSERT_EQ("mul_grad", d_mul.type_);
TEST
(
Backward
,
net_input_of_network_not_need_grad
)
{
//}
ops
::
NetOp
net
;
//
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"W1"
,
"b1"
},
// TEST(Backward, net_input_of_network_not_need_grad) {
{
"mul_tmp_0"
,
"add_tmp_0"
,
"hidden0"
},
{}));
// ops::NetOp net;
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"hidden0"
,
"W2"
,
"b2"
},
// net.AddOp(f::OpRegistry::CreateOp("fc", {"X", "W1", "b1"},
{
"mul_tmp_1"
,
"add_tmp_1"
,
"hidden1"
},
{}));
// {"mul_tmp_0", "add_tmp_0", "hidden0"},
net
.
CompleteAddOp
();
// {}));
auto
bwd
=
Backward
(
net
,
{
"X"
});
// X@GRAD is not need.
// net.AddOp(f::OpRegistry::CreateOp("fc", {"hidden0", "W2", "b2"},
ASSERT_TRUE
(
bwd
->
IsNetOp
());
// {"mul_tmp_1", "add_tmp_1", "hidden1"},
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
bwd
.
get
());
// {}));
// net.CompleteAddOp();
std
::
unordered_set
<
std
::
string
>
all_output
=
std
::
unordered_set
<
std
::
string
>
(
// auto bwd = Backward(net, {"X"}); // X@GRAD is not need.
bwd_net
->
outputs_
.
begin
(),
bwd_net
->
outputs_
.
end
());
// ASSERT_TRUE(bwd->IsNetOp());
all_output
.
erase
(
f
::
kEmptyVarName
);
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
//
for
(
auto
&
out
:
{
"W1"
,
"b1"
,
"hidden0"
,
"W2"
,
"b2"
})
{
// std::unordered_set<std::string> all_output =
ASSERT_NE
(
all_output
.
find
(
out
+
f
::
kGradVarSuffix
),
all_output
.
end
());
// std::unordered_set<std::string>(
}
// bwd_net->outputs_.begin(), bwd_net->outputs_.end());
// all_output.erase(f::kEmptyVarName);
// Not Generated X
//
ASSERT_EQ
(
all_output
.
find
(
"X"
+
f
::
kGradVarSuffix
),
all_output
.
end
());
// for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
// ASSERT_NE(all_output.find(out + f::kGradVarSuffix), all_output.end());
ASSERT_EQ
(
2UL
,
bwd_net
->
ops_
.
size
());
// }
ASSERT_TRUE
(
bwd_net
->
ops_
[
1
]
->
IsNetOp
());
//
auto
first_fc_grad
=
static_cast
<
ops
::
NetOp
*>
(
bwd_net
->
ops_
[
1
].
get
());
// // Not Generated X
ASSERT_EQ
(
3UL
,
first_fc_grad
->
ops_
.
size
());
// ASSERT_EQ(all_output.find("X" + f::kGradVarSuffix), all_output.end());
ASSERT_EQ
(
f
::
kEmptyVarName
,
//
first_fc_grad
->
ops_
[
2
]
->
Output
(
"A"
+
f
::
kGradVarSuffix
));
// ASSERT_EQ(2UL, bwd_net->ops_.size());
}
// ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
// auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
TEST
(
Backward
,
net_shared_weight
)
{
// ASSERT_EQ(3UL, first_fc_grad->ops_.size());
ops
::
NetOp
net
;
// ASSERT_EQ(f::kEmptyVarName,
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"X"
,
"W"
},
{
"Out"
},
{}));
// first_fc_grad->ops_[2]->Output("A" + f::kGradVarSuffix));
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"Out"
,
"W"
},
{
"FinalOut"
},
{}));
//}
net
.
CompleteAddOp
();
//
// TEST(Backward, net_shared_weight) {
auto
bwd
=
f
::
Backward
(
net
,
{});
// ops::NetOp net;
ASSERT_TRUE
(
bwd
->
IsNetOp
());
// net.AddOp(f::OpRegistry::CreateOp("mul", {"X", "W"}, {"Out"}, {}));
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
bwd
.
get
());
// net.AddOp(f::OpRegistry::CreateOp("mul", {"Out", "W"}, {"FinalOut"}, {}));
ASSERT_EQ
(
3UL
,
bwd_net
->
ops_
.
size
());
// net.CompleteAddOp();
ASSERT_EQ
(
"add"
,
bwd_net
->
ops_
[
2
]
->
type_
);
//
}
// auto bwd = f::Backward(net, {});
// ASSERT_TRUE(bwd->IsNetOp());
TEST
(
Backward
,
op_register_grad_not_for_network
)
{
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
// ASSERT_EQ(3UL, bwd_net->ops_.size());
"fc"
,
{
"X"
,
"W"
,
"b"
},
{
"mul_out"
,
"add_out"
,
"out1"
},
// ASSERT_EQ("add", bwd_net->ops_[2]->type_);
{{
"temporary_index"
,
std
::
vector
<
int
>
{
0
,
1
}}});
//}
//
ASSERT_THROW
(
f
::
OpRegistry
::
CreateGradOp
(
*
fwd
),
EnforceNotMet
);
// TEST(Backward, op_register_grad_not_for_network) {
}
// auto fwd = f::OpRegistry::CreateOp(
// "fc", {"X", "W", "b"}, {"mul_out", "add_out", "out1"},
TEST
(
Backward
,
op_all_input_are_not_need
)
{
// {{"temporary_index", std::vector<int>{0, 1}}});
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
//
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"X"
,
"b"
});
// ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
ASSERT_TRUE
(
backward
->
IsNetOp
());
//}
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
//
ASSERT_TRUE
(
net
->
ops_
.
empty
());
// TEST(Backward, op_all_input_are_not_need) {
}
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"X", "b"});
TEST
(
Backward
,
op_all_output_are_not_need
)
{
// ASSERT_TRUE(backward->IsNetOp());
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
// auto net = static_cast<ops::NetOp *>(backward.get());
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"Out"
});
// ASSERT_TRUE(net->ops_.empty());
ASSERT_TRUE
(
backward
->
IsNetOp
());
//}
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
//
ASSERT_TRUE
(
net
->
ops_
.
empty
());
// TEST(Backward, op_all_output_are_not_need) {
}
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"Out"});
TEST
(
Backward
,
op_part_of_output_are_not_need
)
{
// ASSERT_TRUE(backward->IsNetOp());
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"many_output_op"
,
{
"X"
},
{
"Y"
,
"Z"
},
{});
// auto net = static_cast<ops::NetOp *>(backward.get());
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"Z"
});
// ASSERT_TRUE(net->ops_.empty());
ASSERT_TRUE
(
backward
->
IsNetOp
());
//}
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
//
ASSERT_EQ
(
net
->
ops_
.
size
(),
2UL
);
// TEST(Backward, op_part_of_output_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("many_output_op", {"X"}, {"Y", "Z"}, {});
auto
&
fill_zero
=
*
net
->
ops_
[
0
];
// auto backward = f::Backward(*fwd, {"Z"});
ASSERT_EQ
(
"fill_zeros_like"
,
fill_zero
.
type_
);
// ASSERT_TRUE(backward->IsNetOp());
ASSERT_EQ
(
1UL
,
fill_zero
.
inputs_
.
size
());
// auto net = static_cast<ops::NetOp *>(backward.get());
ASSERT_EQ
(
"Z"
,
fill_zero
.
inputs_
[
0
]);
// ASSERT_EQ(net->ops_.size(), 2UL);
ASSERT_EQ
(
1UL
,
fill_zero
.
outputs_
.
size
());
//
ASSERT_EQ
(
"Z"
+
f
::
kZeroVarSuffix
,
fill_zero
.
outputs_
[
0
]);
// auto &fill_zero = *net->ops_[0];
// ASSERT_EQ("fill_zeros_like", fill_zero.type_);
auto
&
d_many_out
=
*
net
->
ops_
[
1
];
// ASSERT_EQ(1UL, fill_zero.inputs_.size());
ASSERT_EQ
(
"many_output_op_grad"
,
d_many_out
.
type_
);
// ASSERT_EQ("Z", fill_zero.inputs_[0]);
ASSERT_EQ
(
1UL
+
2UL
+
2UL
,
d_many_out
.
inputs_
.
size
());
// I/O/OG
// ASSERT_EQ(1UL, fill_zero.outputs_.size());
ASSERT_EQ
(
"Z"
+
f
::
kZeroVarSuffix
,
d_many_out
.
Input
(
"z"
+
f
::
kGradVarSuffix
));
// ASSERT_EQ("Z" + f::kZeroVarSuffix, fill_zero.outputs_[0]);
ASSERT_EQ
(
"Y"
+
f
::
kGradVarSuffix
,
d_many_out
.
Input
(
"y"
+
f
::
kGradVarSuffix
));
//
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
// auto &d_many_out = *net->ops_[1];
d_many_out
.
Output
(
"x"
+
f
::
kGradVarSuffix
));
// ASSERT_EQ("many_output_op_grad", d_many_out.type_);
}
// ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size()); // I/O/OG
// ASSERT_EQ("Z" + f::kZeroVarSuffix, d_many_out.Input("z" +
TEST
(
Backward
,
op_part_of_input_are_not_need
)
{
// f::kGradVarSuffix));
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"a"
,
"b"
},
{
"out"
},
{});
// ASSERT_EQ("Y" + f::kGradVarSuffix, d_many_out.Input("y" +
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"a"
});
// f::kGradVarSuffix));
auto
&
grad_mul
=
*
backward
;
// ASSERT_EQ("X" + f::kGradVarSuffix,
ASSERT_EQ
(
grad_mul
.
type_
,
"mul_grad"
);
// d_many_out.Output("x" + f::kGradVarSuffix));
ASSERT_EQ
(
grad_mul
.
inputs_
.
size
(),
2UL
+
1UL
+
1UL
);
//}
ASSERT_EQ
(
grad_mul
.
outputs_
.
size
(),
2UL
);
//
ASSERT_EQ
(
grad_mul
.
Output
(
"A"
+
f
::
kGradVarSuffix
),
f
::
kEmptyVarName
);
// TEST(Backward, op_part_of_input_are_not_need) {
ASSERT_EQ
(
grad_mul
.
Output
(
"B"
+
f
::
kGradVarSuffix
),
"b"
+
f
::
kGradVarSuffix
);
// auto fwd = f::OpRegistry::CreateOp("mul", {"a", "b"}, {"out"}, {});
ASSERT_EQ
(
grad_mul
.
Input
(
"Out"
+
f
::
kGradVarSuffix
),
// auto backward = f::Backward(*fwd, {"a"});
"out"
+
f
::
kGradVarSuffix
);
// auto &grad_mul = *backward;
ASSERT_EQ
(
grad_mul
.
Input
(
"A"
),
"a"
);
// ASSERT_EQ(grad_mul.type_, "mul_grad");
ASSERT_EQ
(
grad_mul
.
Input
(
"B"
),
"b"
);
// ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
ASSERT_EQ
(
grad_mul
.
Input
(
"Out"
),
"out"
);
// ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
}
// ASSERT_EQ(grad_mul.Output("A" + f::kGradVarSuffix), f::kEmptyVarName);
// ASSERT_EQ(grad_mul.Output("B" + f::kGradVarSuffix), "b" +
TEST
(
Backward
,
linear_net_intermediate_variable_has_no_grad
)
{
// f::kGradVarSuffix);
ops
::
NetOp
net
;
// ASSERT_EQ(grad_mul.Input("Out" + f::kGradVarSuffix),
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"x1"
,
"w1"
,
"b1"
},
// "out" + f::kGradVarSuffix);
{
"mul_out1"
,
"add_out1"
,
"out1"
},
{}));
// ASSERT_EQ(grad_mul.Input("A"), "a");
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"out1"
,
"w2"
,
"b2"
},
// ASSERT_EQ(grad_mul.Input("B"), "b");
{
"mul_out2"
,
"tmp_out2"
,
"out2"
},
{}));
// ASSERT_EQ(grad_mul.Input("Out"), "out");
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"out2"
,
"w3"
,
"b3"
},
//}
{
"mul_out3"
,
"tmp_out3"
,
"out3"
},
{}));
//
net
.
CompleteAddOp
();
// TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
auto
backward
=
f
::
Backward
(
net
,
{
"mul_out2"
,
"tmp_out2"
,
"out2"
});
// ops::NetOp net;
ASSERT_TRUE
(
backward
->
IsNetOp
());
// net.AddOp(f::OpRegistry::CreateOp("fc", {"x1", "w1", "b1"},
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
// {"mul_out1", "add_out1", "out1"}, {}));
ASSERT_EQ
(
bwd_net
->
ops_
.
size
(),
3UL
);
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out1", "w2", "b2"},
auto
&
grad_fc
=
*
bwd_net
->
ops_
[
0
];
// {"mul_out2", "tmp_out2", "out2"}, {}));
EXPECT_EQ
(
grad_fc
.
inputs_
.
size
(),
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out2", "w3", "b3"},
3UL
/* external input number */
// {"mul_out3", "tmp_out3", "out3"}, {}));
+
1UL
/* external output number*/
// net.CompleteAddOp();
+
1UL
/* number of gradient of external output*/
// auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
+
2U
/* internal variable number*/
);
// ASSERT_TRUE(backward->IsNetOp());
EXPECT_EQ
(
grad_fc
.
outputs_
.
size
(),
2UL
/* input number of mul*/
// auto bwd_net = static_cast<ops::NetOp *>(backward.get());
+
2UL
/* input number of rowwise_add */
// ASSERT_EQ(bwd_net->ops_.size(), 3UL);
+
1UL
/* input number of sigmod */
);
// auto &grad_fc = *bwd_net->ops_[0];
EXPECT_EQ
(
bwd_net
->
ops_
[
1
]
->
inputs_
.
size
(),
0UL
);
// EXPECT_EQ(grad_fc.inputs_.size(),
EXPECT_EQ
(
bwd_net
->
ops_
[
1
]
->
outputs_
.
size
(),
0UL
);
// 3UL /* external input number */
EXPECT_EQ
(
bwd_net
->
ops_
[
2
]
->
inputs_
.
size
(),
0UL
);
// + 1UL /* external output number*/
EXPECT_EQ
(
bwd_net
->
ops_
[
2
]
->
outputs_
.
size
(),
0UL
);
// + 1UL /* number of gradient of external output*/
}
// + 2U /* internal variable number*/);
// EXPECT_EQ(grad_fc.outputs_.size(), 2UL /* input number of mul*/
// + 2UL /* input number of rowwise_add
// */
// + 1UL /* input number of sigmod */);
// EXPECT_EQ(bwd_net->ops_[1]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[1]->outputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->outputs_.size(), 0UL);
//}
paddle/framework/grad_op_builder_test.cc
浏览文件 @
dba618c0
...
@@ -47,8 +47,8 @@ class IOIgnoredOpMaker : public OpProtoAndCheckerMaker {
...
@@ -47,8 +47,8 @@ class IOIgnoredOpMaker : public OpProtoAndCheckerMaker {
namespace
f
=
paddle
::
framework
;
namespace
f
=
paddle
::
framework
;
TEST
(
GradOpBuilder
,
AddTwo
)
{
TEST
(
GradOpBuilder
,
AddTwo
)
{
std
::
shared_ptr
<
f
::
OperatorBase
>
add_op
(
std
::
shared_ptr
<
f
::
OperatorBase
>
add_op
(
f
::
OpRegistry
::
CreateOp
(
f
::
OpRegistry
::
CreateOp
(
"add_two"
,
{
"x"
,
"y"
},
{
"out"
},
{}));
"add_two"
,
{{
"X"
,
{
"x"
}},
{
"Y"
,
{
"y"
}}},
{{
"Out"
,
{
"out"
}}
},
{}));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_add_op
=
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_add_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
add_op
);
f
::
OpRegistry
::
CreateGradOp
(
*
add_op
);
EXPECT_EQ
(
static_cast
<
int
>
(
grad_add_op
->
inputs_
.
size
()),
4
);
EXPECT_EQ
(
static_cast
<
int
>
(
grad_add_op
->
inputs_
.
size
()),
4
);
...
@@ -70,8 +70,10 @@ TEST(GradOpBuilder, MutiInOut) {
...
@@ -70,8 +70,10 @@ TEST(GradOpBuilder, MutiInOut) {
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
4
,
5
}},
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
4
,
5
}},
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
}}};
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
}}};
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
"mult_io"
,
{
"in1"
,
"in2_1"
,
"in2_2"
,
"in2_3"
,
"in3"
},
"mult_io"
,
{{
"In1"
,
{
"in1"
}},
{
"out1"
,
"out2_1"
,
"out2_2"
},
attrs
));
{
"In2_mult"
,
{
"in2_1"
,
"in2_2"
,
"in2_3"
}},
{
"In3"
,
{
"in3"
}}},
{{
"Out1"
,
{
"Out2_mult"
}},
{
"Out2"
,
{
"out2_1"
,
"out2_2"
}}},
attrs
));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
...
@@ -104,8 +106,10 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
...
@@ -104,8 +106,10 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
,
5
}},
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
,
5
}},
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
2
,
3
}}};
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
2
,
3
}}};
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
"io_ignored"
,
{
"in1"
,
"in2_1"
,
"in2_2"
,
"in3_1"
,
"in3_2"
},
"io_ignored"
,
{{
"In1"
,
{
"in1"
}},
{
"out1_1"
,
"out1_2"
,
"out2"
},
attrs
));
{
"In2_mult"
,
{
"in2_1"
,
"in2_2"
}},
{
"In3_mult"
,
{
"in3_1"
,
"in3_2"
}}},
{{
"Out1_mult"
,
{
"out1_1"
,
"out1_2"
}},
{
"Out2"
,
{
"out2"
}}},
attrs
));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
...
...
paddle/framework/op_registry_test.cc
浏览文件 @
dba618c0
...
@@ -57,8 +57,13 @@ REGISTER_OP(my_test_op, paddle::framework::MyTestOp,
...
@@ -57,8 +57,13 @@ REGISTER_OP(my_test_op, paddle::framework::MyTestOp,
TEST
(
OpRegistry
,
CreateOp
)
{
TEST
(
OpRegistry
,
CreateOp
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
auto
input
=
op_desc
.
add_inputs
();
op_desc
.
add_outputs
(
"bb"
);
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
float
scale
=
3.3
;
float
scale
=
3.3
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
...
@@ -78,8 +83,13 @@ TEST(OpRegistry, CreateOp) {
...
@@ -78,8 +83,13 @@ TEST(OpRegistry, CreateOp) {
TEST
(
OpRegistry
,
IllegalAttr
)
{
TEST
(
OpRegistry
,
IllegalAttr
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
auto
input
=
op_desc
.
add_inputs
();
op_desc
.
add_outputs
(
"bb"
);
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_name
(
"scale"
);
...
@@ -103,8 +113,13 @@ TEST(OpRegistry, IllegalAttr) {
...
@@ -103,8 +113,13 @@ TEST(OpRegistry, IllegalAttr) {
TEST
(
OpRegistry
,
DefaultValue
)
{
TEST
(
OpRegistry
,
DefaultValue
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
auto
input
=
op_desc
.
add_inputs
();
op_desc
.
add_outputs
(
"bb"
);
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
ASSERT_TRUE
(
op_desc
.
IsInitialized
());
ASSERT_TRUE
(
op_desc
.
IsInitialized
());
...
@@ -127,8 +142,13 @@ static void SetInputFormat(paddle::framework::OpDesc* desc) {
...
@@ -127,8 +142,13 @@ static void SetInputFormat(paddle::framework::OpDesc* desc) {
TEST
(
OpRegistry
,
CustomChecker
)
{
TEST
(
OpRegistry
,
CustomChecker
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"my_test_op"
);
op_desc
.
set_type
(
"my_test_op"
);
op_desc
.
add_inputs
(
"ii"
);
auto
input
=
op_desc
.
add_inputs
();
op_desc
.
add_outputs
(
"oo"
);
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"ii"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"oo"
;
SetInputFormat
(
&
op_desc
);
SetInputFormat
(
&
op_desc
);
// attr 'test_attr' is not set
// attr 'test_attr' is not set
...
...
paddle/framework/operator_test.cc
浏览文件 @
dba618c0
...
@@ -27,12 +27,12 @@ class OpWithoutKernelTest : public OperatorBase {
...
@@ -27,12 +27,12 @@ class OpWithoutKernelTest : public OperatorBase {
void
InferShape
(
const
Scope
&
scope
)
const
override
{}
void
InferShape
(
const
Scope
&
scope
)
const
override
{}
void
Run
(
const
Scope
&
scope
,
void
Run
(
const
Scope
&
scope
,
const
platform
::
DeviceContext
&
dev_ctx
)
const
override
{
const
platform
::
DeviceContext
&
dev_ctx
)
const
override
{
op_run_num
++
;
++
op_run_num
;
ASSERT_EQ
(
(
int
)
inputs_
.
size
(
),
1
);
ASSERT_EQ
(
static_cast
<
int
>
(
inputs_
.
size
()
),
1
);
ASSERT_EQ
(
(
int
)
outputs_
.
size
(
),
1
);
ASSERT_EQ
(
static_cast
<
int
>
(
outputs_
.
size
()
),
1
);
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
[
0
]),
nullptr
);
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
.
at
(
"input"
)
[
0
]),
nullptr
);
ASSERT_EQ
(
x
,
1
);
ASSERT_EQ
(
x
,
1
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
[
0
]),
nullptr
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
.
at
(
"output"
)
[
0
]),
nullptr
);
}
}
public:
public:
...
@@ -60,8 +60,13 @@ REGISTER_OP(test_operator, paddle::framework::OpWithoutKernelTest,
...
@@ -60,8 +60,13 @@ REGISTER_OP(test_operator, paddle::framework::OpWithoutKernelTest,
TEST
(
OperatorBase
,
all
)
{
TEST
(
OperatorBase
,
all
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"test_operator"
);
op_desc
.
set_type
(
"test_operator"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"IN1"
;
auto
*
ipt
=
op_desc
.
mutable_inputs
()
->
Add
();
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"OUT1"
;
*
ipt
->
mutable_var_names
()
->
Add
()
=
"IN1"
;
ipt
->
set_op_proto_name
(
"input"
);
auto
*
output
=
op_desc
.
mutable_outputs
()
->
Add
();
*
output
->
mutable_var_names
()
->
Add
()
=
"OUT1"
;
output
->
set_op_proto_name
(
"output"
);
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
@@ -113,24 +118,6 @@ class CPUKernelTest : public OpKernel {
...
@@ -113,24 +118,6 @@ class CPUKernelTest : public OpKernel {
}
}
};
};
// multiple inputs test
class
OperatorMultiInputsTest
:
public
OperatorBase
{
public:
void
Init
()
override
{
x
=
1
;
}
void
InferShape
(
const
Scope
&
scope
)
const
override
{}
void
Run
(
const
Scope
&
scope
,
const
platform
::
DeviceContext
&
dev_ctx
)
const
override
{
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
[
0
]),
nullptr
);
ASSERT_EQ
(
x
,
1
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
[
0
]),
nullptr
);
ASSERT_EQ
(
Input
(
"x"
),
"IN1"
);
ASSERT_EQ
(
Input
(
"y"
),
"OUT1"
);
}
public:
float
x
=
0
;
};
class
OpKernelTestMultiInputsProtoAndCheckerMaker
class
OpKernelTestMultiInputsProtoAndCheckerMaker
:
public
OpProtoAndCheckerMaker
{
:
public
OpProtoAndCheckerMaker
{
public:
public:
...
@@ -196,8 +183,14 @@ REGISTER_OP_CPU_KERNEL(op_with_kernel,
...
@@ -196,8 +183,14 @@ REGISTER_OP_CPU_KERNEL(op_with_kernel,
TEST
(
OpKernel
,
all
)
{
TEST
(
OpKernel
,
all
)
{
paddle
::
framework
::
OpDesc
op_desc
;
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"op_with_kernel"
);
op_desc
.
set_type
(
"op_with_kernel"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"IN1"
;
auto
*
ipt
=
op_desc
.
mutable_inputs
()
->
Add
();
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"OUT1"
;
*
ipt
->
mutable_var_names
()
->
Add
()
=
"IN1"
;
ipt
->
set_op_proto_name
(
"input"
);
auto
*
output
=
op_desc
.
mutable_outputs
()
->
Add
();
*
output
->
mutable_var_names
()
->
Add
()
=
"OUT1"
;
output
->
set_op_proto_name
(
"output"
);
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
@@ -223,12 +216,19 @@ TEST(OpKernel, multi_inputs) {
...
@@ -223,12 +216,19 @@ TEST(OpKernel, multi_inputs) {
OpDesc
op_desc
;
OpDesc
op_desc
;
op_desc
.
set_type
(
"op_multi_inputs_with_kernel"
);
op_desc
.
set_type
(
"op_multi_inputs_with_kernel"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x0"
;
auto
x
=
op_desc
.
mutable_inputs
()
->
Add
();
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x1"
;
x
->
set_op_proto_name
(
"xs"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x2"
;
*
x
->
mutable_var_names
()
->
Add
()
=
"x0"
;
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"k0"
;
*
x
->
mutable_var_names
()
->
Add
()
=
"x1"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"y0"
;
*
x
->
mutable_var_names
()
->
Add
()
=
"x2"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"y1"
;
auto
k
=
op_desc
.
mutable_inputs
()
->
Add
();
k
->
set_op_proto_name
(
"k"
);
*
k
->
mutable_var_names
()
->
Add
()
=
"k0"
;
auto
y
=
op_desc
.
mutable_outputs
()
->
Add
();
y
->
set_op_proto_name
(
"ys"
);
*
y
->
mutable_var_names
()
->
Add
()
=
"y0"
;
*
y
->
mutable_var_names
()
->
Add
()
=
"y1"
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
...
paddle/framework/pybind.cc
浏览文件 @
dba618c0
...
@@ -53,9 +53,10 @@ void ExposeOperator(ClassType &m) {
...
@@ -53,9 +53,10 @@ void ExposeOperator(ClassType &m) {
return
op
.
type_
;
return
op
.
type_
;
})
})
.
def
(
"outputs"
,
.
def
(
"outputs"
,
[](
const
typename
ClassType
::
type
&
op
)
->
std
::
vector
<
std
::
string
>
{
[](
const
typename
ClassType
::
type
&
op
)
return
op
.
outputs_
;
->
std
::
unordered_map
<
std
::
string
,
std
::
vector
<
std
::
string
>>
{
})
return
op
.
outputs_
;
})
.
def
(
"__str__"
,
&
ClassType
::
type
::
DebugString
);
.
def
(
"__str__"
,
&
ClassType
::
type
::
DebugString
);
}
}
...
...
paddle/operators/fc_op.cc
浏览文件 @
dba618c0
...
@@ -22,19 +22,19 @@ class FullyConnectedOp : public NetOp {
...
@@ -22,19 +22,19 @@ class FullyConnectedOp : public NetOp {
void
Init
()
override
{
void
Init
()
override
{
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{
{
Input
(
"X"
),
Input
(
"W"
)
,
{
"X"
,
{
Input
(
"X"
)}},
{
"Y"
,
{
Input
(
"W"
)}}
,
},
},
{
Output
(
"before_act"
)
},
{}));
{
{
"Out"
,
{
Output
(
"before_act"
)}}
},
{}));
auto
b
=
Input
(
"b"
);
auto
b
=
Input
(
"b"
);
if
(
b
!=
framework
::
kEmptyVarName
)
{
if
(
b
!=
framework
::
kEmptyVarName
)
{
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
AddOp
(
OpRegistry
::
CreateOp
(
{
Output
(
"before_act"
),
Input
(
"b"
)
},
"rowwise_add"
,
{{
"X"
,
{
Output
(
"before_act"
)}},
{
"b"
,
{
Input
(
"b"
)}}
},
{
Output
(
"before_act"
)
},
{}));
{{
"Out"
,
{
Output
(
"before_act"
)}}
},
{}));
}
}
auto
activation
=
GetAttr
<
std
::
string
>
(
"activation"
);
auto
activation
=
GetAttr
<
std
::
string
>
(
"activation"
);
AddOp
(
OpRegistry
::
CreateOp
(
activation
,
{
Output
(
"before_act"
)
},
AddOp
(
OpRegistry
::
CreateOp
(
activation
,
{
{
"X"
,
{
Output
(
"before_act"
)}}
},
{
Output
(
"Y"
)
},
{}));
{
{
"Out"
,
{
Output
(
"Out"
)}}
},
{}));
CompleteAddOp
(
false
);
CompleteAddOp
(
false
);
}
}
};
};
...
@@ -47,7 +47,7 @@ class FullyConnectedOpMaker : public OpProtoAndCheckerMaker {
...
@@ -47,7 +47,7 @@ class FullyConnectedOpMaker : public OpProtoAndCheckerMaker {
AddInput
(
"W"
,
"the weight of fc operator"
);
AddInput
(
"W"
,
"the weight of fc operator"
);
AddInput
(
"b"
,
"the bias of fc operator"
);
AddInput
(
"b"
,
"the bias of fc operator"
);
AddOutput
(
"
Y
"
,
"the output of fc operator"
);
AddOutput
(
"
Out
"
,
"the output of fc operator"
);
AddOutput
(
"before_act"
,
"the before activation output of fc operator"
)
AddOutput
(
"before_act"
,
"the before activation output of fc operator"
)
.
SetTemporary
();
.
SetTemporary
();
AddAttr
<
std
::
string
>
(
"activation"
,
"The activation key for fc layer"
)
AddAttr
<
std
::
string
>
(
"activation"
,
"The activation key for fc layer"
)
...
...
paddle/operators/net_op_test.cc
浏览文件 @
dba618c0
...
@@ -47,23 +47,24 @@ TEST(OpKernel, all) {
...
@@ -47,23 +47,24 @@ TEST(OpKernel, all) {
ASSERT_NE
(
net
,
nullptr
);
ASSERT_NE
(
net
,
nullptr
);
auto
op1
=
std
::
make_shared
<
TestOp
>
();
auto
op1
=
std
::
make_shared
<
TestOp
>
();
op1
->
inputs_
=
{
"x"
,
"w1"
,
"b1"
};
op1
->
inputs_
=
{
{
"X"
,
{
"x"
}},
{
"W"
,
{
"w1"
}},
{
"b"
,
{
"b1"
}}
};
op1
->
outputs_
=
{
"y"
};
op1
->
outputs_
=
{
{
"Out"
,
{
"y"
}}
};
net
->
AddOp
(
op1
);
net
->
AddOp
(
op1
);
auto
op2
=
std
::
make_shared
<
TestOp
>
();
auto
op2
=
std
::
make_shared
<
TestOp
>
();
op2
->
inputs_
=
{
"y"
,
"w2"
,
"b2"
};
op2
->
inputs_
=
{
{
"X"
,
{
"y"
}},
{
"W"
,
{
"w2"
}},
{
"b"
,
{
"b2"
}}
};
op2
->
outputs_
=
{
"z"
};
op2
->
outputs_
=
{
{
"Out"
,
{
"z"
}}
};
net
->
AddOp
(
op2
);
net
->
AddOp
(
op2
);
net
->
CompleteAddOp
();
net
->
CompleteAddOp
();
AssertSameVectorWithoutOrder
({
"x"
,
"w1"
,
"b1"
,
"w2"
,
"b2"
},
net
->
inputs_
);
AssertSameVectorWithoutOrder
({
"x"
,
"w1"
,
"b1"
,
"w2"
,
"b2"
},
AssertSameVectorWithoutOrder
({
"y"
,
"z"
},
net
->
outputs_
);
net
->
inputs_
.
at
(
"__all__"
));
AssertSameVectorWithoutOrder
({
"y"
,
"z"
},
net
->
outputs_
.
at
(
"__all__"
));
auto
tmp_idx_iter
=
net
->
attrs_
.
find
(
"temporary_index"
);
auto
tmp_idx_iter
=
net
->
attrs_
.
find
(
"temporary_index"
);
ASSERT_NE
(
net
->
attrs_
.
end
(),
tmp_idx_iter
);
ASSERT_NE
(
net
->
attrs_
.
end
(),
tmp_idx_iter
);
auto
&
tmp_idx
=
boost
::
get
<
std
::
vector
<
int
>>
(
tmp_idx_iter
->
second
);
auto
&
tmp_idx
=
boost
::
get
<
std
::
vector
<
int
>>
(
tmp_idx_iter
->
second
);
ASSERT_EQ
(
1UL
,
tmp_idx
.
size
());
ASSERT_EQ
(
1UL
,
tmp_idx
.
size
());
ASSERT_EQ
(
"y"
,
net
->
outputs_
[
tmp_idx
[
0
]]);
ASSERT_EQ
(
"y"
,
net
->
outputs_
.
at
(
"__all__"
)
[
tmp_idx
[
0
]]);
Scope
scope
;
Scope
scope
;
platform
::
CPUDeviceContext
dev_ctx
;
platform
::
CPUDeviceContext
dev_ctx
;
...
@@ -78,8 +79,8 @@ TEST(OpKernel, all) {
...
@@ -78,8 +79,8 @@ TEST(OpKernel, all) {
TEST
(
NetOp
,
insert_op
)
{
TEST
(
NetOp
,
insert_op
)
{
NetOp
net
;
NetOp
net
;
auto
op1
=
std
::
make_shared
<
EmptyOp
>
();
auto
op1
=
std
::
make_shared
<
EmptyOp
>
();
op1
->
inputs_
=
{
"x"
,
"w1"
,
"b1"
};
op1
->
inputs_
=
{
{
"X"
,
{
"x"
}},
{
"W"
,
{
"w1"
}},
{
"b"
,
{
"b1"
}}
};
op1
->
outputs_
=
{
"y"
};
op1
->
outputs_
=
{
{
"Out"
,
{
"y"
}}
};
net
.
AddOp
(
op1
);
net
.
AddOp
(
op1
);
net
.
InsertOp
(
0
,
op1
);
net
.
InsertOp
(
0
,
op1
);
ASSERT_EQ
(
2UL
,
net
.
ops_
.
size
());
ASSERT_EQ
(
2UL
,
net
.
ops_
.
size
());
...
...
paddle/operators/recurrent_op_test.cc
浏览文件 @
dba618c0
此差异已折叠。
点击以展开。
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录