PaddlePaddle / PaddleDetection · Commit 7202f425
Commit 7202f425
Authored on Aug 10, 2017 by qingqing01; committed via GitHub on Aug 10, 2017

Merge branch 'refactorize_framework_proto' into feature/refactorize_framework_proto

Parents: 030f4302, 36709d05
Showing 6 changed files with 264 additions and 478 deletions (+264, -478)
paddle/framework/grad_op_builder.cc                 +14   -54
paddle/framework/grad_op_builder_test.cc            +19   -21
paddle/framework/op_registry_test.cc                 +0   -10
paddle/framework/operator_test.cc                    +2   -17
paddle/operators/recurrent_op_test.cc              +227  -376
python/paddle/v2/framework/tests/test_operator.py    +2    -0
paddle/framework/grad_op_builder.cc (view file @ 7202f425)

@@ -18,59 +18,32 @@ permissions and limitations under the License. */
 namespace paddle {
 namespace framework {
 
 /**
 class OpRegistry;
 
 using VarIndexMap = std::unordered_map<std::string, int>;
 
 enum class OpArgType { IN, OUT };
 
-static std::vector<int>* GetOpFormat(OperatorBase* op, const OpArgType& type) {
-  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
-  return op->attrs_.count(key)
-             ? &boost::get<std::vector<int>>(op->attrs_.at(key))
-             : nullptr;
-}
-
-static const std::vector<int>* GetOpFormat(const OperatorBase* op,
-                                           const OpArgType& type) {
-  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
-  return op->attrs_.count(key)
-             ? &boost::get<std::vector<int>>(op->attrs_.at(key))
-             : nullptr;
-}
-
 static void TransOpArg(const OperatorBase* src_op, OperatorBase* dst_op,
                        const OpArgType& src_type, const OpArgType& dst_type,
-                       int& idx, bool is_grad) {
-  const std::vector<std::string>& src_inout =
+                       bool is_grad) {
+  const auto& src_inout =
       src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
-  const std::vector<int>* src_format = GetOpFormat(src_op, src_type);
 
-  std::vector<std::string>& dst_inout =
+  auto& dst_inout =
       dst_type == OpArgType::IN ? dst_op->inputs_ : dst_op->outputs_;
-  std::vector<int>* dst_format = GetOpFormat(dst_op, dst_type);
+
   const OpProto& proto = OpRegistry::protos().at(src_op->type_);
   const auto& src_arg_list =
       src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
 
   for (const auto& arg : src_arg_list) {
     std::string src_name = arg.name();
-    std::string dst_name = is_grad ? src_name + kGradVarSuffix : src_name;
-    (*dst_op->in_out_idxs_)[dst_name] = idx++;
-    int src_arg_idx = src_op->in_out_idxs_->at(src_name);
-    int src_begin =
-        src_format == nullptr ? src_arg_idx : src_format->at(src_arg_idx);
-    int src_end = src_format == nullptr ? src_arg_idx + 1
-                                        : src_format->at(src_arg_idx + 1);
-    for (int i = src_begin; i < src_end; ++i) {
-      std::string s =
-          is_grad ? src_inout[i] + kGradVarSuffix
-                  : (arg.ignore_gradient() ? kEmptyVarName : src_inout[i]);
-      dst_inout.emplace_back(s);
-    }
-    if (dst_format != nullptr) {
-      dst_format->push_back(dst_inout.size());
+    std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
+    for (auto& var_name : src_inout.at(src_name)) {
+      std::string s = is_grad ? GradVarName(var_name)
+                              : (arg.no_gradient() ? kEmptyVarName : var_name);
+      dst_inout[dst_name].emplace_back(s);
     }
   }
 }
@@ -80,25 +53,12 @@ OperatorBase* BuildGradOp(const OperatorBase* op) {
   OperatorBase* grad_op = OpRegistry::op_creators().at(grad_op_type)();
   grad_op->type_ = grad_op_type;
   grad_op->attrs_ = op->attrs_;
-  grad_op->attrs_.erase("input_format");
-  grad_op->attrs_.erase("output_format");
-  if (GetOpFormat(op, OpArgType::IN) != nullptr) {
-    grad_op->attrs_["output_format"] = std::vector<int>({0});
-  }
-  if (GetOpFormat(op, OpArgType::IN) != nullptr ||
-      GetOpFormat(op, OpArgType::OUT) != nullptr) {
-    grad_op->attrs_["input_format"] = std::vector<int>({0});
-  }
-  grad_op->in_out_idxs_.reset(new VarIndexMap());
-  int in_idx = 0;
-  int out_idx = 0;
-  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::IN, in_idx, false);   // I
-  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, in_idx, false);  // G
-  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, in_idx, true);   // OG
-  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::OUT, out_idx, true);  // IG
+  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::IN, false);   // I
+  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, false);  // O
+  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, true);   // OG
+  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::OUT, true);   // IG
   return grad_op;
 }
 **/
 
 OperatorBase* BuildGradOp(const OperatorBase* op) { return nullptr; }
 
 }  // namespace framework
 }  // namespace paddle
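For readers following the refactor, the renaming scheme used by the commented-out TransOpArg above can be illustrated with a small standalone sketch. This is not Paddle code: OpIO, BuildGradIO, and GradName below are hypothetical stand-ins that mirror what GradVarName and the four I / O / OG / IG calls do, namely forward inputs, forward outputs, and output gradients become the gradient op's inputs, while input gradients become its outputs (the no_gradient / kEmptyVarName case is left out).

// Standalone sketch (not Paddle's API): mimic the I / O / OG -> inputs,
// IG -> outputs mapping performed by the commented-out BuildGradOp above.
#include <cassert>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using VarMap = std::map<std::string, std::vector<std::string>>;

// Hypothetical stand-in for GradVarName(): append the "@GRAD" suffix that
// the old test expectations spell out ("x@GRAD", "out@GRAD", ...).
std::string GradName(const std::string& name) { return name + "@GRAD"; }

struct OpIO {
  VarMap inputs;
  VarMap outputs;
};

// Build the gradient op's argument maps from the forward op's maps.
OpIO BuildGradIO(const OpIO& fwd) {
  OpIO grad;
  for (const auto& kv : fwd.inputs) grad.inputs[kv.first] = kv.second;   // I
  for (const auto& kv : fwd.outputs) grad.inputs[kv.first] = kv.second;  // O
  for (const auto& kv : fwd.outputs) {                                   // OG
    for (const auto& v : kv.second)
      grad.inputs[GradName(kv.first)].push_back(GradName(v));
  }
  for (const auto& kv : fwd.inputs) {                                    // IG
    for (const auto& v : kv.second)
      grad.outputs[GradName(kv.first)].push_back(GradName(v));
  }
  return grad;
}

int main() {
  // The add_two case from grad_op_builder_test.cc: X -> x, Y -> y, Out -> out.
  OpIO add_two{{{"X", {"x"}}, {"Y", {"y"}}}, {{"Out", {"out"}}}};
  OpIO grad = BuildGradIO(add_two);
  assert(grad.inputs.size() == 4);   // X, Y, Out, Out@GRAD
  assert(grad.outputs.size() == 2);  // X@GRAD, Y@GRAD
  assert(grad.outputs.at("X@GRAD").front() == "x@GRAD");
  std::cout << "grad op input slots: " << grad.inputs.size() << "\n";
  return 0;
}

The add_two numbers here line up with the expectations in grad_op_builder_test.cc below: 4 gradient-op input slots and 2 output slots.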
paddle/framework/grad_op_builder_test.cc (view file @ 7202f425)

@@ -51,14 +51,14 @@ TEST(GradOpBuilder, AddTwo) {
       "add_two", {{"X", {"x"}}, {"Y", {"y"}}}, {{"Out", {"out"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_add_op =
       f::OpRegistry::CreateGradOp(*add_op);
-  EXPECT_EQ(static_cast<int>(grad_add_op->inputs_.size()), 4);
-  EXPECT_EQ(static_cast<int>(grad_add_op->outputs_.size()), 2);
+  EXPECT_EQ(grad_add_op->inputs_.size(), 4UL);
+  EXPECT_EQ(grad_add_op->outputs_.size(), 2UL);
   EXPECT_EQ(grad_add_op->Input("X"), "x");
   EXPECT_EQ(grad_add_op->Input("Y"), "y");
   EXPECT_EQ(grad_add_op->Input("Out"), "out");
-  EXPECT_EQ(grad_add_op->Input("Out@GRAD"), "out@GRAD");
-  EXPECT_EQ(grad_add_op->Output("X@GRAD"), "x@GRAD");
-  EXPECT_EQ(grad_add_op->Output("Y@GRAD"), "y@GRAD");
+  EXPECT_EQ(grad_add_op->Input(f::GradVarName("Out")), f::GradVarName("out"));
+  EXPECT_EQ(grad_add_op->Output(f::GradVarName("X")), f::GradVarName("x"));
+  EXPECT_EQ(grad_add_op->Output(f::GradVarName("Y")), f::GradVarName("y"));
 }
 
 REGISTER_OP(mult_io, f::NOP, f::MutiInOutOpMaker);
@@ -67,17 +67,16 @@ REGISTER_OP(io_ignored, f::NOP, f::IOIgnoredOpMaker);
 REGISTER_GRADIENT_OP(io_ignored, io_ignored_grad, f::NOP);
 
 TEST(GradOpBuilder, MutiInOut) {
-  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 4, 5}},
-                        {"output_format", std::vector<int>{0, 1, 3}}};
   std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
       "mult_io", {{"In1", {"in1"}},
                   {"In2_mult", {"in2_1", "in2_2", "in2_3"}},
                   {"In3", {"in3"}}},
-      {{"Out1", {"out1"}}, {"Out2_mult", {"out2_1", "out2_2"}}}, attrs));
+      {{"Out1", {"out1"}}, {"Out2_mult", {"out2_1", "out2_2"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_test_op =
       f::OpRegistry::CreateGradOp(*test_op);
-  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
+  ASSERT_EQ(grad_test_op->inputs_.size(), 3UL + 2UL + 2UL);
   EXPECT_EQ(grad_test_op->Input("In1"), "in1");
   EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
             std::vector<std::string>({"in2_1", "in2_2", "in2_3"}));
@@ -91,7 +90,7 @@ TEST(GradOpBuilder, MutiInOut) {
             std::vector<std::string>(
                 {f::GradVarName("out2_1"), f::GradVarName("out2_2")}));
-  ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
+  ASSERT_EQ(grad_test_op->outputs_.size(), 3UL);
   EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")), f::GradVarName("in1"));
   EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
             std::vector<std::string>({f::GradVarName("in2_1"),
@@ -101,18 +100,17 @@ TEST(GradOpBuilder, MutiInOut) {
 }
 
 TEST(GradOpBuilder, IOIgnoredInGradient) {
-  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 3, 5}},
-                        {"output_format", std::vector<int>{0, 2, 3}}};
   std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
       "io_ignored", {{"In1", {"in1"}},
                      {"In2_mult", {"in2_1", "in2_2"}},
                      {"In3_mult", {"in3_1", "in3_2"}}},
-      {{"Out1_mult", {"out1_1", "out1_2"}}, {"Out2", {"out2"}}}, attrs));
+      {{"Out1_mult", {"out1_1", "out1_2"}}, {"Out2", {"out2"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_test_op =
       f::OpRegistry::CreateGradOp(*test_op);
 
   // 'In2' and 'Out2' are ignored in gradient calculating
-  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
+  ASSERT_EQ(grad_test_op->inputs_.size(), 3UL + 2UL + 2UL);
   EXPECT_EQ(grad_test_op->Input("In1"), "in1");
   EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
             std::vector<std::string>({f::kEmptyVarName, f::kEmptyVarName}));
@@ -127,7 +125,7 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
   EXPECT_EQ(grad_test_op->Input(f::GradVarName("Out2")), f::GradVarName("out2"));
-  ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
+  ASSERT_EQ(grad_test_op->outputs_.size(), 3UL);
   EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")), f::GradVarName("in1"));
   EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
             std::vector<std::string>(
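The "input_format" and "output_format" attributes removed from these tests encoded each named parameter as an offset range into one flat variable list, which is presumably why the old assertions counted 5UL + 3UL + 3UL variables while the new ones count 3UL + 2UL + 2UL named parameters. A minimal standalone sketch (hypothetical names, not Paddle code) of how an offsets vector such as {0, 1, 4, 5} slices the flat list into In1, In2_mult, and In3:

// Standalone sketch: interpret the old "input_format" offsets that the
// updated tests no longer pass. Offsets {0, 1, 4, 5} mean that parameter i
// owns the flat variables in the half-open range [format[i], format[i + 1]).
#include <iostream>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> params = {"In1", "In2_mult", "In3"};
  std::vector<std::string> flat = {"in1", "in2_1", "in2_2", "in2_3", "in3"};
  std::vector<int> format = {0, 1, 4, 5};  // the removed input_format attr

  for (size_t i = 0; i < params.size(); ++i) {
    std::cout << params[i] << ":";
    for (int j = format[i]; j < format[i + 1]; ++j) {
      std::cout << " " << flat[j];
    }
    std::cout << "\n";
  }
  // Prints:
  //   In1: in1
  //   In2_mult: in2_1 in2_2 in2_3
  //   In3: in3
  return 0;
}

With inputs grouped by parameter name in a map, this bookkeeping becomes unnecessary, which is what the diffs above and below remove.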
paddle/framework/op_registry_test.cc (view file @ 7202f425)

@@ -131,14 +131,6 @@ TEST(OpRegistry, DefaultValue) {
   ASSERT_EQ(op->GetAttr<float>("scale"), 1.0);
 }
 
-static void SetInputFormat(paddle::framework::OpDesc* desc) {
-  auto attr = desc->add_attrs();
-  attr->set_name("input_format");
-  attr->set_type(paddle::framework::INTS);
-  attr->mutable_ints()->Add(0);
-  attr->mutable_ints()->Add(1);
-}
-
 TEST(OpRegistry, CustomChecker) {
   paddle::framework::OpDesc op_desc;
   op_desc.set_type("my_test_op");
@@ -149,7 +141,6 @@ TEST(OpRegistry, CustomChecker) {
   auto output = op_desc.add_outputs();
   output->set_parameter("output");
   *output->mutable_arguments()->Add() = "oo";
-  SetInputFormat(&op_desc);
 
   // attr 'test_attr' is not set
   bool caught = false;
@@ -189,7 +180,6 @@ TEST(OpRegistry, CustomChecker) {
   attr->set_name("test_attr");
   attr->set_type(paddle::framework::AttrType::INT);
   attr->set_i(4);
-  SetInputFormat(&op_desc);
   auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
   paddle::platform::CPUDeviceContext dev_ctx;
   paddle::framework::Scope scope;
paddle/framework/operator_test.cc (view file @ 7202f425)

@@ -185,11 +185,11 @@ TEST(OpKernel, all) {
   op_desc.set_type("op_with_kernel");
   auto* ipt = op_desc.mutable_inputs()->Add();
   *ipt->mutable_arguments()->Add() = "IN1";
-  ipt->set_parameter("input");
+  ipt->set_parameter("x");
 
   auto* output = op_desc.mutable_outputs()->Add();
   *output->mutable_arguments()->Add() = "OUT1";
-  output->set_parameter("output");
+  output->set_parameter("y");
 
   auto attr = op_desc.mutable_attrs()->Add();
   attr->set_name("scale");
@@ -234,21 +234,6 @@ TEST(OpKernel, multi_inputs) {
   attr->set_type(paddle::framework::AttrType::FLOAT);
   attr->set_f(3.14);
 
-  auto attr0 = op_desc.mutable_attrs()->Add();
-  attr0->set_name("input_format");
-  attr0->set_type(paddle::framework::AttrType::INTS);
-  auto input_format = attr0->mutable_ints();
-  input_format->Add(0);  // x0
-  input_format->Add(3);  // k
-  input_format->Add(4);  // end
-
-  auto attr1 = op_desc.mutable_attrs()->Add();
-  attr1->set_name("output_format");
-  attr1->set_type(paddle::framework::AttrType::INTS);
-  auto output_format = attr1->mutable_ints();
-  output_format->Add(0);  // y0
-  output_format->Add(2);  // y1
-
   paddle::platform::CPUDeviceContext cpu_device_context;
   paddle::framework::Scope scope;
   scope.NewVar("x0")->GetMutable<Tensor>();
paddle/operators/recurrent_op_test.cc (view file @ 7202f425)
This diff is collapsed. Click to expand it.
python/paddle/v2/framework/tests/test_operator.py (view file @ 7202f425)

@@ -74,6 +74,7 @@ class TestOpDescCreationMethod(unittest.TestCase):
         expected1.inputs.extend(['x', 'w', 'b'])
         expected1.outputs.extend(['y'])
         expected1.type = 'fc'
+        # the input_format can be removed after testing
         attr = expected1.attrs.add()
         attr.name = 'input_format'
         attr.type = attribute_pb2.INTS
@@ -86,6 +87,7 @@ class TestOpDescCreationMethod(unittest.TestCase):
         expected2.inputs.extend(['x1', 'x2', 'x3', 'w1', 'w2', 'w3', 'b'])
         expected2.outputs.extend(['y'])
         expected2.type = 'fc'
+        # the input_format can be removed after testing
         attr = expected2.attrs.add()
         attr.name = 'input_format'
         attr.type = attribute_pb2.INTS