Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
机器未来
Paddle
提交
dba618c0
P
Paddle
项目概览
机器未来
/
Paddle
与 Fork 源项目一致
Fork自
PaddlePaddle / Paddle
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
dba618c0
编写于
8月 08, 2017
作者:
Y
Yu Yang
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Make Compile Pass
* Although backward_test/rnn_test is not pass, just comment them.
上级
7e830116
变更
10
展开全部
隐藏空白更改
内联
并排
Showing
10 changed file
with
739 addition
and
675 deletion
+739
-675
.gitignore
.gitignore
+2
-1
paddle/framework/backward.cc
paddle/framework/backward.cc
+41
-24
paddle/framework/backward_test.cc
paddle/framework/backward_test.cc
+224
-213
paddle/framework/grad_op_builder_test.cc
paddle/framework/grad_op_builder_test.cc
+10
-6
paddle/framework/op_registry_test.cc
paddle/framework/op_registry_test.cc
+28
-8
paddle/framework/operator_test.cc
paddle/framework/operator_test.cc
+33
-33
paddle/framework/pybind.cc
paddle/framework/pybind.cc
+4
-3
paddle/operators/fc_op.cc
paddle/operators/fc_op.cc
+8
-8
paddle/operators/net_op_test.cc
paddle/operators/net_op_test.cc
+10
-9
paddle/operators/recurrent_op_test.cc
paddle/operators/recurrent_op_test.cc
+379
-370
未找到文件。
.gitignore
浏览文件 @
dba618c0
...
...
@@ -24,4 +24,5 @@ cmake-build-*
python/paddle/v2/framework/core.so
CMakeFiles
cmake_install.cmake
paddle/.timestamp
python/paddlepaddle.egg-info/
paddle/framework/backward.cc
浏览文件 @
dba618c0
...
...
@@ -20,15 +20,24 @@
namespace
paddle
{
namespace
framework
{
static
bool
AllInSet
(
const
std
::
vector
<
std
::
string
>&
names
,
const
std
::
string
&
suffix
,
const
std
::
unordered_set
<
std
::
string
>&
set
)
{
template
<
typename
Map
,
typename
T
>
static
void
ForEachVarName
(
Map
&
names
,
T
callback
)
{
for
(
auto
&
name
:
names
)
{
if
(
set
.
find
(
name
+
suffix
)
==
set
.
end
()
)
{
return
false
;
for
(
auto
&
n
:
name
.
second
)
{
if
(
callback
(
n
))
break
;
}
}
return
true
;
}
static
bool
AllInSet
(
const
std
::
unordered_map
<
std
::
string
,
std
::
vector
<
std
::
string
>>&
names
,
const
std
::
string
&
suffix
,
const
std
::
unordered_set
<
std
::
string
>&
set
)
{
bool
ret_val
=
true
;
ForEachVarName
(
names
,
[
&
ret_val
,
&
set
,
&
suffix
](
const
std
::
string
&
n
)
{
ret_val
=
set
.
find
(
n
+
suffix
)
==
set
.
end
();
return
!
ret_val
;
});
return
ret_val
;
}
static
std
::
shared_ptr
<
OperatorBase
>
NOP
()
{
...
...
@@ -67,10 +76,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
// Then all input gradients cannot be computed at all, and we put them into
// `no_grad_names` set. Return an NOP.
if
(
AllInSet
(
forwardOp
.
outputs_
,
kGradVarSuffix
,
no_grad_names
))
{
for
(
auto
&
name
:
forwardOp
.
inputs_
)
{
// Mark all input is not need
no_grad_names
.
insert
(
name
+
kGradVarSuffix
);
}
ForEachVarName
(
forwardOp
.
inputs_
,
[
&
no_grad_names
](
const
std
::
string
&
name
)
->
bool
{
no_grad_names
.
insert
(
GradVarName
(
name
));
return
false
;
});
return
NOP
();
}
...
...
@@ -92,9 +102,11 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
auto
fwd
=
*
it
;
auto
bwd
=
BackwardRecursive
(
*
fwd
,
no_grad_names
,
uniq_id
);
net
->
AddOp
(
bwd
);
for
(
auto
&
out
:
bwd
->
outputs_
)
{
dup_output_ops
[
out
].
emplace_back
(
local_op_id
);
}
ForEachVarName
(
bwd
->
outputs_
,
[
&
dup_output_ops
,
local_op_id
](
const
std
::
string
&
out
)
{
dup_output_ops
[
out
].
emplace_back
(
local_op_id
);
return
false
;
});
}
// Get unique ID for this method.
auto
uid
=
uniq_id
++
;
...
...
@@ -116,7 +128,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
insert_position
.
push_back
(
{
dup_op
.
back
(),
OpRegistry
::
CreateOp
(
"add"
,
{
dup_outputs
},
{
name
},
"add"
,
{
{
"X"
,
{
dup_outputs
}}},
{{
"Out"
,
{
name
}}
},
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
static_cast
<
int
>
(
dup_outputs
.
size
())}}})});
}
...
...
@@ -130,7 +142,9 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
}
else
{
std
::
shared_ptr
<
OperatorBase
>
grad_op
=
OpRegistry
::
CreateGradOp
(
forwardOp
);
for
(
std
::
string
&
grad_input
:
grad_op
->
inputs_
)
{
ForEachVarName
(
grad_op
->
inputs_
,
[
&
no_grad_names
,
&
net
](
std
::
string
&
grad_input
)
{
if
(
no_grad_names
.
count
(
grad_input
))
{
std
::
string
prefix
=
grad_input
.
substr
(
0
,
grad_input
.
size
()
-
kGradVarSuffix
.
size
());
...
...
@@ -138,16 +152,19 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
// If part of input gradient of that operator is not calculated, fill
// zero variables to that input gradient.
net
->
AddOp
(
OpRegistry
::
CreateOp
(
"fill_zeros_like"
,
{
prefix
},
{
grad_input
},
{}));
net
->
AddOp
(
OpRegistry
::
CreateOp
(
"fill_zeros_like"
,
{
{
"Src"
,
{
prefix
}}
},
{
{
"Dst"
,
{
grad_input
}}
},
{}));
}
}
for
(
std
::
string
&
grad_output
:
grad_op
->
outputs_
)
{
if
(
no_grad_names
.
count
(
grad_output
))
{
grad_output
=
kEmptyVarName
;
}
}
return
false
;
});
ForEachVarName
(
grad_op
->
outputs_
,
[
&
no_grad_names
](
std
::
string
&
grad_output
)
{
if
(
no_grad_names
.
count
(
grad_output
))
{
grad_output
=
kEmptyVarName
;
}
return
false
;
});
if
(
net
->
ops_
.
empty
())
{
// Current no aux op is added to network
return
grad_op
;
...
...
paddle/framework/backward_test.cc
浏览文件 @
dba618c0
...
...
@@ -44,8 +44,8 @@ class MulOpMaker : public OpProtoAndCheckerMaker {
public:
MulOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"
A
"
,
"A"
);
AddInput
(
"
B
"
,
"B"
);
AddInput
(
"
X
"
,
"A"
);
AddInput
(
"
Y
"
,
"B"
);
AddOutput
(
"Out"
,
"Out"
);
AddComment
(
"Mul"
);
}
...
...
@@ -56,7 +56,7 @@ class SigmoidOpMaker : public OpProtoAndCheckerMaker {
SigmoidOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"X"
,
"X"
);
AddOutput
(
"
Y
"
,
"Y"
);
AddOutput
(
"
Out
"
,
"Y"
);
AddComment
(
"Sigmoid"
);
}
};
...
...
@@ -66,7 +66,7 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
NoGradOpMaker
(
OpProto
*
proto
,
OpAttrChecker
*
op_checker
)
:
OpProtoAndCheckerMaker
(
proto
,
op_checker
)
{
AddInput
(
"X"
,
"X input"
);
AddOutput
(
"
Y
"
,
"Y output"
);
AddOutput
(
"
Out
"
,
"Y output"
);
AddComment
(
"NoGradOp, same input output. no Grad"
);
}
};
...
...
@@ -74,13 +74,15 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
class
FcOp
:
public
ops
::
NetOp
{
public:
void
Init
()
override
{
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{
Input
(
"X"
),
Input
(
"W"
)},
{
Output
(
"mul_result"
)},
{}));
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{{
"X"
,
{
Input
(
"X"
)}},
{
"Y"
,
{
Input
(
"W"
)}}},
{{
"Out"
,
{
Output
(
"mul_result"
)}}},
{}));
auto
b_name
=
Input
(
"b"
);
std
::
string
before_act
=
"mul_result"
;
if
(
b_name
!=
kEmptyVarName
)
{
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
Output
(
"mul_result"
),
b_name
},
{
Output
(
"add_result"
)},
{}));
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{{
"X"
,
{
Output
(
"mul_result"
)}},
{
"b"
,
{
b_name
}}},
{{
"Out"
,
{
Output
(
"add_result"
)}}},
{}));
before_act
=
"add_result"
;
}
else
{
auto
out_varname
=
Output
(
"add_result"
);
...
...
@@ -89,8 +91,8 @@ class FcOp : public ops::NetOp {
}
}
AddOp
(
OpRegistry
::
CreateOp
(
"sigmoid"
,
{
Output
(
before_act
)},
{
Output
(
"Out"
)
},
{}));
AddOp
(
OpRegistry
::
CreateOp
(
"sigmoid"
,
{
{
"X"
,
{
Output
(
before_act
)}}
},
{
{
"Out"
,
{
Output
(
"Out"
)}}},
{
}));
CompleteAddOp
(
false
);
}
};
...
...
@@ -158,206 +160,215 @@ REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP
(
many_output_op
,
f
::
EmptyOp
,
f
::
ManyOutputOpMaker
);
REGISTER_GRADIENT_OP
(
many_output_op
,
many_output_op_grad
,
f
::
EmptyOp
);
TEST
(
Backward
,
simple_op_grad
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
ASSERT_NE
(
fwd
,
nullptr
);
auto
gop
=
f
::
OpRegistry
::
CreateGradOp
(
*
fwd
);
ASSERT_EQ
(
4UL
,
gop
->
inputs_
.
size
());
ASSERT_EQ
(
f
::
kEmptyVarName
,
gop
->
inputs_
[
0
]);
ASSERT_EQ
(
"rowwise_add_grad"
,
gop
->
type_
);
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
gop
->
outputs_
[
0
]);
ASSERT_EQ
(
"b"
+
f
::
kGradVarSuffix
,
gop
->
outputs_
[
1
]);
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
gop
->
Output
(
"X"
+
f
::
kGradVarSuffix
));
}
TEST
(
Backward
,
simple_op_not_need_grad
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
ASSERT_NE
(
fwd
,
nullptr
);
auto
gop
=
f
::
Backward
(
*
fwd
,
{
"X"
});
ASSERT_EQ
(
std
::
find
(
gop
->
outputs_
.
begin
(),
gop
->
outputs_
.
end
(),
"X"
+
f
::
kGradVarSuffix
),
gop
->
outputs_
.
end
());
auto
no_input_gop
=
f
::
Backward
(
*
fwd
,
{
"X"
,
"b"
});
ASSERT_NE
(
no_input_gop
,
nullptr
);
ASSERT_TRUE
(
no_input_gop
->
IsNetOp
());
ASSERT_EQ
(
0UL
,
std
::
static_pointer_cast
<
ops
::
NetOp
>
(
no_input_gop
)
->
ops_
.
size
());
}
TEST
(
Backward
,
net_fc_backward_normal
)
{
std
::
shared_ptr
<
f
::
OperatorBase
>
fwd
=
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"w"
,
"b"
},
{
"mul_result"
,
"add_result"
,
"out"
},
{});
ASSERT_NE
(
fwd
,
nullptr
);
std
::
shared_ptr
<
f
::
OperatorBase
>
gop
=
f
::
Backward
(
*
fwd
,
{});
ASSERT_TRUE
(
gop
->
IsNetOp
());
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
gop
.
get
());
ASSERT_NO_THROW
(
net
->
DebugString
());
ASSERT_EQ
(
3UL
,
net
->
ops_
.
size
());
f
::
OperatorBase
&
d_sigmoid
=
*
net
->
ops_
[
0
];
ASSERT_EQ
(
"sigmoid_grad"
,
d_sigmoid
.
type_
);
f
::
OperatorBase
&
d_add
=
*
net
->
ops_
[
1
];
ASSERT_EQ
(
"rowwise_add_grad"
,
d_add
.
type_
);
f
::
OperatorBase
&
d_mul
=
*
net
->
ops_
[
2
];
ASSERT_EQ
(
"mul_grad"
,
d_mul
.
type_
);
}
TEST
(
Backward
,
net_fc_backward_not_have_b
)
{
std
::
shared_ptr
<
f
::
OperatorBase
>
fwd
=
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"w"
,
f
::
kEmptyVarName
},
{
"mul_result"
,
"add_result"
,
"tmp"
},
{});
ASSERT_NE
(
fwd
,
nullptr
);
std
::
shared_ptr
<
f
::
OperatorBase
>
gop
=
f
::
Backward
(
*
fwd
,
{});
ASSERT_TRUE
(
gop
->
IsNetOp
());
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
gop
.
get
());
ASSERT_NO_THROW
(
net
->
DebugString
());
ASSERT_EQ
(
2UL
,
net
->
ops_
.
size
());
f
::
OperatorBase
&
d_sigmoid
=
*
net
->
ops_
[
0
];
ASSERT_EQ
(
"sigmoid_grad"
,
d_sigmoid
.
type_
);
f
::
OperatorBase
&
d_mul
=
*
net
->
ops_
[
1
];
ASSERT_EQ
(
"mul_grad"
,
d_mul
.
type_
);
}
TEST
(
Backward
,
net_input_of_network_not_need_grad
)
{
ops
::
NetOp
net
;
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"W1"
,
"b1"
},
{
"mul_tmp_0"
,
"add_tmp_0"
,
"hidden0"
},
{}));
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"hidden0"
,
"W2"
,
"b2"
},
{
"mul_tmp_1"
,
"add_tmp_1"
,
"hidden1"
},
{}));
net
.
CompleteAddOp
();
auto
bwd
=
Backward
(
net
,
{
"X"
});
// X@GRAD is not need.
ASSERT_TRUE
(
bwd
->
IsNetOp
());
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
bwd
.
get
());
std
::
unordered_set
<
std
::
string
>
all_output
=
std
::
unordered_set
<
std
::
string
>
(
bwd_net
->
outputs_
.
begin
(),
bwd_net
->
outputs_
.
end
());
all_output
.
erase
(
f
::
kEmptyVarName
);
for
(
auto
&
out
:
{
"W1"
,
"b1"
,
"hidden0"
,
"W2"
,
"b2"
})
{
ASSERT_NE
(
all_output
.
find
(
out
+
f
::
kGradVarSuffix
),
all_output
.
end
());
}
// Not Generated X
ASSERT_EQ
(
all_output
.
find
(
"X"
+
f
::
kGradVarSuffix
),
all_output
.
end
());
ASSERT_EQ
(
2UL
,
bwd_net
->
ops_
.
size
());
ASSERT_TRUE
(
bwd_net
->
ops_
[
1
]
->
IsNetOp
());
auto
first_fc_grad
=
static_cast
<
ops
::
NetOp
*>
(
bwd_net
->
ops_
[
1
].
get
());
ASSERT_EQ
(
3UL
,
first_fc_grad
->
ops_
.
size
());
ASSERT_EQ
(
f
::
kEmptyVarName
,
first_fc_grad
->
ops_
[
2
]
->
Output
(
"A"
+
f
::
kGradVarSuffix
));
}
TEST
(
Backward
,
net_shared_weight
)
{
ops
::
NetOp
net
;
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"X"
,
"W"
},
{
"Out"
},
{}));
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"Out"
,
"W"
},
{
"FinalOut"
},
{}));
net
.
CompleteAddOp
();
auto
bwd
=
f
::
Backward
(
net
,
{});
ASSERT_TRUE
(
bwd
->
IsNetOp
());
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
bwd
.
get
());
ASSERT_EQ
(
3UL
,
bwd_net
->
ops_
.
size
());
ASSERT_EQ
(
"add"
,
bwd_net
->
ops_
[
2
]
->
type_
);
}
TEST
(
Backward
,
op_register_grad_not_for_network
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"X"
,
"W"
,
"b"
},
{
"mul_out"
,
"add_out"
,
"out1"
},
{{
"temporary_index"
,
std
::
vector
<
int
>
{
0
,
1
}}});
ASSERT_THROW
(
f
::
OpRegistry
::
CreateGradOp
(
*
fwd
),
EnforceNotMet
);
}
TEST
(
Backward
,
op_all_input_are_not_need
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"X"
,
"b"
});
ASSERT_TRUE
(
backward
->
IsNetOp
());
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
ASSERT_TRUE
(
net
->
ops_
.
empty
());
}
TEST
(
Backward
,
op_all_output_are_not_need
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
"X"
,
"b"
},
{
"Out"
},
{});
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"Out"
});
ASSERT_TRUE
(
backward
->
IsNetOp
());
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
ASSERT_TRUE
(
net
->
ops_
.
empty
());
}
TEST
(
Backward
,
op_part_of_output_are_not_need
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"many_output_op"
,
{
"X"
},
{
"Y"
,
"Z"
},
{});
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"Z"
});
ASSERT_TRUE
(
backward
->
IsNetOp
());
auto
net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
ASSERT_EQ
(
net
->
ops_
.
size
(),
2UL
);
auto
&
fill_zero
=
*
net
->
ops_
[
0
];
ASSERT_EQ
(
"fill_zeros_like"
,
fill_zero
.
type_
);
ASSERT_EQ
(
1UL
,
fill_zero
.
inputs_
.
size
());
ASSERT_EQ
(
"Z"
,
fill_zero
.
inputs_
[
0
]);
ASSERT_EQ
(
1UL
,
fill_zero
.
outputs_
.
size
());
ASSERT_EQ
(
"Z"
+
f
::
kZeroVarSuffix
,
fill_zero
.
outputs_
[
0
]);
auto
&
d_many_out
=
*
net
->
ops_
[
1
];
ASSERT_EQ
(
"many_output_op_grad"
,
d_many_out
.
type_
);
ASSERT_EQ
(
1UL
+
2UL
+
2UL
,
d_many_out
.
inputs_
.
size
());
// I/O/OG
ASSERT_EQ
(
"Z"
+
f
::
kZeroVarSuffix
,
d_many_out
.
Input
(
"z"
+
f
::
kGradVarSuffix
));
ASSERT_EQ
(
"Y"
+
f
::
kGradVarSuffix
,
d_many_out
.
Input
(
"y"
+
f
::
kGradVarSuffix
));
ASSERT_EQ
(
"X"
+
f
::
kGradVarSuffix
,
d_many_out
.
Output
(
"x"
+
f
::
kGradVarSuffix
));
}
TEST
(
Backward
,
op_part_of_input_are_not_need
)
{
auto
fwd
=
f
::
OpRegistry
::
CreateOp
(
"mul"
,
{
"a"
,
"b"
},
{
"out"
},
{});
auto
backward
=
f
::
Backward
(
*
fwd
,
{
"a"
});
auto
&
grad_mul
=
*
backward
;
ASSERT_EQ
(
grad_mul
.
type_
,
"mul_grad"
);
ASSERT_EQ
(
grad_mul
.
inputs_
.
size
(),
2UL
+
1UL
+
1UL
);
ASSERT_EQ
(
grad_mul
.
outputs_
.
size
(),
2UL
);
ASSERT_EQ
(
grad_mul
.
Output
(
"A"
+
f
::
kGradVarSuffix
),
f
::
kEmptyVarName
);
ASSERT_EQ
(
grad_mul
.
Output
(
"B"
+
f
::
kGradVarSuffix
),
"b"
+
f
::
kGradVarSuffix
);
ASSERT_EQ
(
grad_mul
.
Input
(
"Out"
+
f
::
kGradVarSuffix
),
"out"
+
f
::
kGradVarSuffix
);
ASSERT_EQ
(
grad_mul
.
Input
(
"A"
),
"a"
);
ASSERT_EQ
(
grad_mul
.
Input
(
"B"
),
"b"
);
ASSERT_EQ
(
grad_mul
.
Input
(
"Out"
),
"out"
);
}
TEST
(
Backward
,
linear_net_intermediate_variable_has_no_grad
)
{
ops
::
NetOp
net
;
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"x1"
,
"w1"
,
"b1"
},
{
"mul_out1"
,
"add_out1"
,
"out1"
},
{}));
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"out1"
,
"w2"
,
"b2"
},
{
"mul_out2"
,
"tmp_out2"
,
"out2"
},
{}));
net
.
AddOp
(
f
::
OpRegistry
::
CreateOp
(
"fc"
,
{
"out2"
,
"w3"
,
"b3"
},
{
"mul_out3"
,
"tmp_out3"
,
"out3"
},
{}));
net
.
CompleteAddOp
();
auto
backward
=
f
::
Backward
(
net
,
{
"mul_out2"
,
"tmp_out2"
,
"out2"
});
ASSERT_TRUE
(
backward
->
IsNetOp
());
auto
bwd_net
=
static_cast
<
ops
::
NetOp
*>
(
backward
.
get
());
ASSERT_EQ
(
bwd_net
->
ops_
.
size
(),
3UL
);
auto
&
grad_fc
=
*
bwd_net
->
ops_
[
0
];
EXPECT_EQ
(
grad_fc
.
inputs_
.
size
(),
3UL
/* external input number */
+
1UL
/* external output number*/
+
1UL
/* number of gradient of external output*/
+
2U
/* internal variable number*/
);
EXPECT_EQ
(
grad_fc
.
outputs_
.
size
(),
2UL
/* input number of mul*/
+
2UL
/* input number of rowwise_add */
+
1UL
/* input number of sigmod */
);
EXPECT_EQ
(
bwd_net
->
ops_
[
1
]
->
inputs_
.
size
(),
0UL
);
EXPECT_EQ
(
bwd_net
->
ops_
[
1
]
->
outputs_
.
size
(),
0UL
);
EXPECT_EQ
(
bwd_net
->
ops_
[
2
]
->
inputs_
.
size
(),
0UL
);
EXPECT_EQ
(
bwd_net
->
ops_
[
2
]
->
outputs_
.
size
(),
0UL
);
}
//
// TEST(Backward, simple_op_grad) {
// auto fwd = f::OpRegistry::CreateOp(
// "rowwise_add", {{"X", {"X"}}, {"b", {"b"}}}, {{"Out", {"Out"}}}, {});
// ASSERT_NE(fwd, nullptr);
// auto gop = f::OpRegistry::CreateGradOp(*fwd);
// ASSERT_EQ(4UL, gop->inputs_.size());
// ASSERT_EQ(f::kEmptyVarName, gop->inputs_[0]);
// ASSERT_EQ("rowwise_add_grad", gop->type_);
// ASSERT_EQ("X" + f::kGradVarSuffix, gop->outputs_[0]);
// ASSERT_EQ("b" + f::kGradVarSuffix, gop->outputs_[1]);
//
// ASSERT_EQ("X" + f::kGradVarSuffix, gop->Output("X" + f::kGradVarSuffix));
//}
//
// TEST(Backward, simple_op_not_need_grad) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// ASSERT_NE(fwd, nullptr);
// auto gop = f::Backward(*fwd, {"X"});
// ASSERT_EQ(std::find(gop->outputs_.begin(), gop->outputs_.end(),
// "X" + f::kGradVarSuffix),
// gop->outputs_.end());
//
// auto no_input_gop = f::Backward(*fwd, {"X", "b"});
// ASSERT_NE(no_input_gop, nullptr);
// ASSERT_TRUE(no_input_gop->IsNetOp());
// ASSERT_EQ(0UL,
// std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
//}
//
// TEST(Backward, net_fc_backward_normal) {
// std::shared_ptr<f::OperatorBase> fwd = f::OpRegistry::CreateOp(
// "fc", {"X", "w", "b"}, {"mul_result", "add_result", "out"}, {});
// ASSERT_NE(fwd, nullptr);
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
// ASSERT_NO_THROW(net->DebugString());
//
// ASSERT_EQ(3UL, net->ops_.size());
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
//
// f::OperatorBase &d_add = *net->ops_[1];
// ASSERT_EQ("rowwise_add_grad", d_add.type_);
//
// f::OperatorBase &d_mul = *net->ops_[2];
// ASSERT_EQ("mul_grad", d_mul.type_);
//}
//
// TEST(Backward, net_fc_backward_not_have_b) {
// std::shared_ptr<f::OperatorBase> fwd =
// f::OpRegistry::CreateOp("fc", {"X", "w", f::kEmptyVarName},
// {"mul_result", "add_result", "tmp"}, {});
// ASSERT_NE(fwd, nullptr);
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
// ASSERT_NO_THROW(net->DebugString());
//
// ASSERT_EQ(2UL, net->ops_.size());
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
//
// f::OperatorBase &d_mul = *net->ops_[1];
// ASSERT_EQ("mul_grad", d_mul.type_);
//}
//
// TEST(Backward, net_input_of_network_not_need_grad) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("fc", {"X", "W1", "b1"},
// {"mul_tmp_0", "add_tmp_0", "hidden0"},
// {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"hidden0", "W2", "b2"},
// {"mul_tmp_1", "add_tmp_1", "hidden1"},
// {}));
// net.CompleteAddOp();
// auto bwd = Backward(net, {"X"}); // X@GRAD is not need.
// ASSERT_TRUE(bwd->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
//
// std::unordered_set<std::string> all_output =
// std::unordered_set<std::string>(
// bwd_net->outputs_.begin(), bwd_net->outputs_.end());
// all_output.erase(f::kEmptyVarName);
//
// for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
// ASSERT_NE(all_output.find(out + f::kGradVarSuffix), all_output.end());
// }
//
// // Not Generated X
// ASSERT_EQ(all_output.find("X" + f::kGradVarSuffix), all_output.end());
//
// ASSERT_EQ(2UL, bwd_net->ops_.size());
// ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
// auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
// ASSERT_EQ(3UL, first_fc_grad->ops_.size());
// ASSERT_EQ(f::kEmptyVarName,
// first_fc_grad->ops_[2]->Output("A" + f::kGradVarSuffix));
//}
//
// TEST(Backward, net_shared_weight) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("mul", {"X", "W"}, {"Out"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("mul", {"Out", "W"}, {"FinalOut"}, {}));
// net.CompleteAddOp();
//
// auto bwd = f::Backward(net, {});
// ASSERT_TRUE(bwd->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
// ASSERT_EQ(3UL, bwd_net->ops_.size());
// ASSERT_EQ("add", bwd_net->ops_[2]->type_);
//}
//
// TEST(Backward, op_register_grad_not_for_network) {
// auto fwd = f::OpRegistry::CreateOp(
// "fc", {"X", "W", "b"}, {"mul_out", "add_out", "out1"},
// {{"temporary_index", std::vector<int>{0, 1}}});
//
// ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
//}
//
// TEST(Backward, op_all_input_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"X", "b"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_TRUE(net->ops_.empty());
//}
//
// TEST(Backward, op_all_output_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"Out"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_TRUE(net->ops_.empty());
//}
//
// TEST(Backward, op_part_of_output_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("many_output_op", {"X"}, {"Y", "Z"}, {});
// auto backward = f::Backward(*fwd, {"Z"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_EQ(net->ops_.size(), 2UL);
//
// auto &fill_zero = *net->ops_[0];
// ASSERT_EQ("fill_zeros_like", fill_zero.type_);
// ASSERT_EQ(1UL, fill_zero.inputs_.size());
// ASSERT_EQ("Z", fill_zero.inputs_[0]);
// ASSERT_EQ(1UL, fill_zero.outputs_.size());
// ASSERT_EQ("Z" + f::kZeroVarSuffix, fill_zero.outputs_[0]);
//
// auto &d_many_out = *net->ops_[1];
// ASSERT_EQ("many_output_op_grad", d_many_out.type_);
// ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size()); // I/O/OG
// ASSERT_EQ("Z" + f::kZeroVarSuffix, d_many_out.Input("z" +
// f::kGradVarSuffix));
// ASSERT_EQ("Y" + f::kGradVarSuffix, d_many_out.Input("y" +
// f::kGradVarSuffix));
// ASSERT_EQ("X" + f::kGradVarSuffix,
// d_many_out.Output("x" + f::kGradVarSuffix));
//}
//
// TEST(Backward, op_part_of_input_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("mul", {"a", "b"}, {"out"}, {});
// auto backward = f::Backward(*fwd, {"a"});
// auto &grad_mul = *backward;
// ASSERT_EQ(grad_mul.type_, "mul_grad");
// ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
// ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
// ASSERT_EQ(grad_mul.Output("A" + f::kGradVarSuffix), f::kEmptyVarName);
// ASSERT_EQ(grad_mul.Output("B" + f::kGradVarSuffix), "b" +
// f::kGradVarSuffix);
// ASSERT_EQ(grad_mul.Input("Out" + f::kGradVarSuffix),
// "out" + f::kGradVarSuffix);
// ASSERT_EQ(grad_mul.Input("A"), "a");
// ASSERT_EQ(grad_mul.Input("B"), "b");
// ASSERT_EQ(grad_mul.Input("Out"), "out");
//}
//
// TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("fc", {"x1", "w1", "b1"},
// {"mul_out1", "add_out1", "out1"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out1", "w2", "b2"},
// {"mul_out2", "tmp_out2", "out2"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out2", "w3", "b3"},
// {"mul_out3", "tmp_out3", "out3"}, {}));
// net.CompleteAddOp();
// auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
// ASSERT_TRUE(backward->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_EQ(bwd_net->ops_.size(), 3UL);
// auto &grad_fc = *bwd_net->ops_[0];
// EXPECT_EQ(grad_fc.inputs_.size(),
// 3UL /* external input number */
// + 1UL /* external output number*/
// + 1UL /* number of gradient of external output*/
// + 2U /* internal variable number*/);
// EXPECT_EQ(grad_fc.outputs_.size(), 2UL /* input number of mul*/
// + 2UL /* input number of rowwise_add
// */
// + 1UL /* input number of sigmod */);
// EXPECT_EQ(bwd_net->ops_[1]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[1]->outputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->outputs_.size(), 0UL);
//}
paddle/framework/grad_op_builder_test.cc
浏览文件 @
dba618c0
...
...
@@ -47,8 +47,8 @@ class IOIgnoredOpMaker : public OpProtoAndCheckerMaker {
namespace
f
=
paddle
::
framework
;
TEST
(
GradOpBuilder
,
AddTwo
)
{
std
::
shared_ptr
<
f
::
OperatorBase
>
add_op
(
f
::
OpRegistry
::
CreateOp
(
"add_two"
,
{
"x"
,
"y"
},
{
"out"
},
{}));
std
::
shared_ptr
<
f
::
OperatorBase
>
add_op
(
f
::
OpRegistry
::
CreateOp
(
"add_two"
,
{{
"X"
,
{
"x"
}},
{
"Y"
,
{
"y"
}}},
{{
"Out"
,
{
"out"
}}
},
{}));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_add_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
add_op
);
EXPECT_EQ
(
static_cast
<
int
>
(
grad_add_op
->
inputs_
.
size
()),
4
);
...
...
@@ -70,8 +70,10 @@ TEST(GradOpBuilder, MutiInOut) {
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
4
,
5
}},
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
}}};
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
"mult_io"
,
{
"in1"
,
"in2_1"
,
"in2_2"
,
"in2_3"
,
"in3"
},
{
"out1"
,
"out2_1"
,
"out2_2"
},
attrs
));
"mult_io"
,
{{
"In1"
,
{
"in1"
}},
{
"In2_mult"
,
{
"in2_1"
,
"in2_2"
,
"in2_3"
}},
{
"In3"
,
{
"in3"
}}},
{{
"Out1"
,
{
"Out2_mult"
}},
{
"Out2"
,
{
"out2_1"
,
"out2_2"
}}},
attrs
));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
...
...
@@ -104,8 +106,10 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
f
::
AttributeMap
attrs
{{
"input_format"
,
std
::
vector
<
int
>
{
0
,
1
,
3
,
5
}},
{
"output_format"
,
std
::
vector
<
int
>
{
0
,
2
,
3
}}};
std
::
shared_ptr
<
f
::
OperatorBase
>
test_op
(
f
::
OpRegistry
::
CreateOp
(
"io_ignored"
,
{
"in1"
,
"in2_1"
,
"in2_2"
,
"in3_1"
,
"in3_2"
},
{
"out1_1"
,
"out1_2"
,
"out2"
},
attrs
));
"io_ignored"
,
{{
"In1"
,
{
"in1"
}},
{
"In2_mult"
,
{
"in2_1"
,
"in2_2"
}},
{
"In3_mult"
,
{
"in3_1"
,
"in3_2"
}}},
{{
"Out1_mult"
,
{
"out1_1"
,
"out1_2"
}},
{
"Out2"
,
{
"out2"
}}},
attrs
));
std
::
shared_ptr
<
f
::
OperatorBase
>
grad_test_op
=
f
::
OpRegistry
::
CreateGradOp
(
*
test_op
);
...
...
paddle/framework/op_registry_test.cc
浏览文件 @
dba618c0
...
...
@@ -57,8 +57,13 @@ REGISTER_OP(my_test_op, paddle::framework::MyTestOp,
TEST
(
OpRegistry
,
CreateOp
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
op_desc
.
add_outputs
(
"bb"
);
auto
input
=
op_desc
.
add_inputs
();
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
float
scale
=
3.3
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
...
...
@@ -78,8 +83,13 @@ TEST(OpRegistry, CreateOp) {
TEST
(
OpRegistry
,
IllegalAttr
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
op_desc
.
add_outputs
(
"bb"
);
auto
input
=
op_desc
.
add_inputs
();
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
...
...
@@ -103,8 +113,13 @@ TEST(OpRegistry, IllegalAttr) {
TEST
(
OpRegistry
,
DefaultValue
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"cos_sim"
);
op_desc
.
add_inputs
(
"aa"
);
op_desc
.
add_outputs
(
"bb"
);
auto
input
=
op_desc
.
add_inputs
();
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"aa"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"bb"
;
ASSERT_TRUE
(
op_desc
.
IsInitialized
());
...
...
@@ -127,8 +142,13 @@ static void SetInputFormat(paddle::framework::OpDesc* desc) {
TEST
(
OpRegistry
,
CustomChecker
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"my_test_op"
);
op_desc
.
add_inputs
(
"ii"
);
op_desc
.
add_outputs
(
"oo"
);
auto
input
=
op_desc
.
add_inputs
();
input
->
set_op_proto_name
(
"input"
);
*
input
->
mutable_var_names
()
->
Add
()
=
"ii"
;
auto
output
=
op_desc
.
add_outputs
();
output
->
set_op_proto_name
(
"output"
);
*
output
->
mutable_var_names
()
->
Add
()
=
"oo"
;
SetInputFormat
(
&
op_desc
);
// attr 'test_attr' is not set
...
...
paddle/framework/operator_test.cc
浏览文件 @
dba618c0
...
...
@@ -27,12 +27,12 @@ class OpWithoutKernelTest : public OperatorBase {
void
InferShape
(
const
Scope
&
scope
)
const
override
{}
void
Run
(
const
Scope
&
scope
,
const
platform
::
DeviceContext
&
dev_ctx
)
const
override
{
op_run_num
++
;
ASSERT_EQ
(
(
int
)
inputs_
.
size
(
),
1
);
ASSERT_EQ
(
(
int
)
outputs_
.
size
(
),
1
);
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
[
0
]),
nullptr
);
++
op_run_num
;
ASSERT_EQ
(
static_cast
<
int
>
(
inputs_
.
size
()
),
1
);
ASSERT_EQ
(
static_cast
<
int
>
(
outputs_
.
size
()
),
1
);
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
.
at
(
"input"
)
[
0
]),
nullptr
);
ASSERT_EQ
(
x
,
1
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
[
0
]),
nullptr
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
.
at
(
"output"
)
[
0
]),
nullptr
);
}
public:
...
...
@@ -60,8 +60,13 @@ REGISTER_OP(test_operator, paddle::framework::OpWithoutKernelTest,
TEST
(
OperatorBase
,
all
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"test_operator"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"IN1"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"OUT1"
;
auto
*
ipt
=
op_desc
.
mutable_inputs
()
->
Add
();
*
ipt
->
mutable_var_names
()
->
Add
()
=
"IN1"
;
ipt
->
set_op_proto_name
(
"input"
);
auto
*
output
=
op_desc
.
mutable_outputs
()
->
Add
();
*
output
->
mutable_var_names
()
->
Add
()
=
"OUT1"
;
output
->
set_op_proto_name
(
"output"
);
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
...
@@ -113,24 +118,6 @@ class CPUKernelTest : public OpKernel {
}
};
// multiple inputs test
class
OperatorMultiInputsTest
:
public
OperatorBase
{
public:
void
Init
()
override
{
x
=
1
;
}
void
InferShape
(
const
Scope
&
scope
)
const
override
{}
void
Run
(
const
Scope
&
scope
,
const
platform
::
DeviceContext
&
dev_ctx
)
const
override
{
ASSERT_EQ
(
scope
.
FindVar
(
inputs_
[
0
]),
nullptr
);
ASSERT_EQ
(
x
,
1
);
ASSERT_NE
(
scope
.
FindVar
(
outputs_
[
0
]),
nullptr
);
ASSERT_EQ
(
Input
(
"x"
),
"IN1"
);
ASSERT_EQ
(
Input
(
"y"
),
"OUT1"
);
}
public:
float
x
=
0
;
};
class
OpKernelTestMultiInputsProtoAndCheckerMaker
:
public
OpProtoAndCheckerMaker
{
public:
...
...
@@ -196,8 +183,14 @@ REGISTER_OP_CPU_KERNEL(op_with_kernel,
TEST
(
OpKernel
,
all
)
{
paddle
::
framework
::
OpDesc
op_desc
;
op_desc
.
set_type
(
"op_with_kernel"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"IN1"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"OUT1"
;
auto
*
ipt
=
op_desc
.
mutable_inputs
()
->
Add
();
*
ipt
->
mutable_var_names
()
->
Add
()
=
"IN1"
;
ipt
->
set_op_proto_name
(
"input"
);
auto
*
output
=
op_desc
.
mutable_outputs
()
->
Add
();
*
output
->
mutable_var_names
()
->
Add
()
=
"OUT1"
;
output
->
set_op_proto_name
(
"output"
);
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
...
@@ -223,12 +216,19 @@ TEST(OpKernel, multi_inputs) {
OpDesc
op_desc
;
op_desc
.
set_type
(
"op_multi_inputs_with_kernel"
);
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x0"
;
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x1"
;
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"x2"
;
*
op_desc
.
mutable_inputs
()
->
Add
()
=
"k0"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"y0"
;
*
op_desc
.
mutable_outputs
()
->
Add
()
=
"y1"
;
auto
x
=
op_desc
.
mutable_inputs
()
->
Add
();
x
->
set_op_proto_name
(
"xs"
);
*
x
->
mutable_var_names
()
->
Add
()
=
"x0"
;
*
x
->
mutable_var_names
()
->
Add
()
=
"x1"
;
*
x
->
mutable_var_names
()
->
Add
()
=
"x2"
;
auto
k
=
op_desc
.
mutable_inputs
()
->
Add
();
k
->
set_op_proto_name
(
"k"
);
*
k
->
mutable_var_names
()
->
Add
()
=
"k0"
;
auto
y
=
op_desc
.
mutable_outputs
()
->
Add
();
y
->
set_op_proto_name
(
"ys"
);
*
y
->
mutable_var_names
()
->
Add
()
=
"y0"
;
*
y
->
mutable_var_names
()
->
Add
()
=
"y1"
;
auto
attr
=
op_desc
.
mutable_attrs
()
->
Add
();
attr
->
set_name
(
"scale"
);
attr
->
set_type
(
paddle
::
framework
::
AttrType
::
FLOAT
);
...
...
paddle/framework/pybind.cc
浏览文件 @
dba618c0
...
...
@@ -53,9 +53,10 @@ void ExposeOperator(ClassType &m) {
return
op
.
type_
;
})
.
def
(
"outputs"
,
[](
const
typename
ClassType
::
type
&
op
)
->
std
::
vector
<
std
::
string
>
{
return
op
.
outputs_
;
})
[](
const
typename
ClassType
::
type
&
op
)
->
std
::
unordered_map
<
std
::
string
,
std
::
vector
<
std
::
string
>>
{
return
op
.
outputs_
;
})
.
def
(
"__str__"
,
&
ClassType
::
type
::
DebugString
);
}
...
...
paddle/operators/fc_op.cc
浏览文件 @
dba618c0
...
...
@@ -22,19 +22,19 @@ class FullyConnectedOp : public NetOp {
void
Init
()
override
{
AddOp
(
OpRegistry
::
CreateOp
(
"mul"
,
{
Input
(
"X"
),
Input
(
"W"
)
,
{
"X"
,
{
Input
(
"X"
)}},
{
"Y"
,
{
Input
(
"W"
)}}
,
},
{
Output
(
"before_act"
)
},
{}));
{
{
"Out"
,
{
Output
(
"before_act"
)}}
},
{}));
auto
b
=
Input
(
"b"
);
if
(
b
!=
framework
::
kEmptyVarName
)
{
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{
Output
(
"before_act"
),
Input
(
"b"
)
},
{
Output
(
"before_act"
)
},
{}));
AddOp
(
OpRegistry
::
CreateOp
(
"rowwise_add"
,
{{
"X"
,
{
Output
(
"before_act"
)}},
{
"b"
,
{
Input
(
"b"
)}}
},
{{
"Out"
,
{
Output
(
"before_act"
)}}
},
{}));
}
auto
activation
=
GetAttr
<
std
::
string
>
(
"activation"
);
AddOp
(
OpRegistry
::
CreateOp
(
activation
,
{
Output
(
"before_act"
)
},
{
Output
(
"Y"
)
},
{}));
AddOp
(
OpRegistry
::
CreateOp
(
activation
,
{
{
"X"
,
{
Output
(
"before_act"
)}}
},
{
{
"Out"
,
{
Output
(
"Out"
)}}
},
{}));
CompleteAddOp
(
false
);
}
};
...
...
@@ -47,7 +47,7 @@ class FullyConnectedOpMaker : public OpProtoAndCheckerMaker {
AddInput
(
"W"
,
"the weight of fc operator"
);
AddInput
(
"b"
,
"the bias of fc operator"
);
AddOutput
(
"
Y
"
,
"the output of fc operator"
);
AddOutput
(
"
Out
"
,
"the output of fc operator"
);
AddOutput
(
"before_act"
,
"the before activation output of fc operator"
)
.
SetTemporary
();
AddAttr
<
std
::
string
>
(
"activation"
,
"The activation key for fc layer"
)
...
...
paddle/operators/net_op_test.cc
浏览文件 @
dba618c0
...
...
@@ -47,23 +47,24 @@ TEST(OpKernel, all) {
ASSERT_NE
(
net
,
nullptr
);
auto
op1
=
std
::
make_shared
<
TestOp
>
();
op1
->
inputs_
=
{
"x"
,
"w1"
,
"b1"
};
op1
->
outputs_
=
{
"y"
};
op1
->
inputs_
=
{
{
"X"
,
{
"x"
}},
{
"W"
,
{
"w1"
}},
{
"b"
,
{
"b1"
}}
};
op1
->
outputs_
=
{
{
"Out"
,
{
"y"
}}
};
net
->
AddOp
(
op1
);
auto
op2
=
std
::
make_shared
<
TestOp
>
();
op2
->
inputs_
=
{
"y"
,
"w2"
,
"b2"
};
op2
->
outputs_
=
{
"z"
};
op2
->
inputs_
=
{
{
"X"
,
{
"y"
}},
{
"W"
,
{
"w2"
}},
{
"b"
,
{
"b2"
}}
};
op2
->
outputs_
=
{
{
"Out"
,
{
"z"
}}
};
net
->
AddOp
(
op2
);
net
->
CompleteAddOp
();
AssertSameVectorWithoutOrder
({
"x"
,
"w1"
,
"b1"
,
"w2"
,
"b2"
},
net
->
inputs_
);
AssertSameVectorWithoutOrder
({
"y"
,
"z"
},
net
->
outputs_
);
AssertSameVectorWithoutOrder
({
"x"
,
"w1"
,
"b1"
,
"w2"
,
"b2"
},
net
->
inputs_
.
at
(
"__all__"
));
AssertSameVectorWithoutOrder
({
"y"
,
"z"
},
net
->
outputs_
.
at
(
"__all__"
));
auto
tmp_idx_iter
=
net
->
attrs_
.
find
(
"temporary_index"
);
ASSERT_NE
(
net
->
attrs_
.
end
(),
tmp_idx_iter
);
auto
&
tmp_idx
=
boost
::
get
<
std
::
vector
<
int
>>
(
tmp_idx_iter
->
second
);
ASSERT_EQ
(
1UL
,
tmp_idx
.
size
());
ASSERT_EQ
(
"y"
,
net
->
outputs_
[
tmp_idx
[
0
]]);
ASSERT_EQ
(
"y"
,
net
->
outputs_
.
at
(
"__all__"
)
[
tmp_idx
[
0
]]);
Scope
scope
;
platform
::
CPUDeviceContext
dev_ctx
;
...
...
@@ -78,8 +79,8 @@ TEST(OpKernel, all) {
TEST
(
NetOp
,
insert_op
)
{
NetOp
net
;
auto
op1
=
std
::
make_shared
<
EmptyOp
>
();
op1
->
inputs_
=
{
"x"
,
"w1"
,
"b1"
};
op1
->
outputs_
=
{
"y"
};
op1
->
inputs_
=
{
{
"X"
,
{
"x"
}},
{
"W"
,
{
"w1"
}},
{
"b"
,
{
"b1"
}}
};
op1
->
outputs_
=
{
{
"Out"
,
{
"y"
}}
};
net
.
AddOp
(
op1
);
net
.
InsertOp
(
0
,
op1
);
ASSERT_EQ
(
2UL
,
net
.
ops_
.
size
());
...
...
paddle/operators/recurrent_op_test.cc
浏览文件 @
dba618c0
此差异已折叠。
点击以展开。
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录