Commit 0c96c997
Authored on Aug 14, 2017 by qiaolongfei

change pybind and net_op_test

Parent: 2f74e608

5 changed files with 20 additions and 21 deletions (+20, -21)
paddle/framework/grad_op_builder.cc       +5  -6
paddle/framework/grad_op_builder_test.cc  +6  -6
paddle/framework/pybind.cc                +4  -4
paddle/operators/net_op_test.cc           +2  -2
paddle/operators/recurrent_op.cc          +3  -3
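All five diffs below make the same mechanical substitution: code that read the operator data members type_, inputs_, outputs_ and attrs_ directly now goes through the accessors Type(), Inputs(), Outputs() and Attrs() (with SetType() for the one write in pybind.cc). A minimal, self-contained C++ sketch of that accessor style follows; the class name MyOp and its member types are hypothetical illustrations of the pattern, not Paddle's actual OperatorBase declaration.

#include <cassert>
#include <map>
#include <string>
#include <vector>

// Hypothetical operator-like class: private fields exposed only via accessors,
// mirroring the Type()/Inputs()/Outputs()/Attrs() calls used in this commit.
class MyOp {
 public:
  using VarNameMap = std::map<std::string, std::vector<std::string>>;
  using AttributeMap = std::map<std::string, int>;  // simplified attribute type

  const std::string& Type() const { return type_; }
  void SetType(const std::string& type) { type_ = type; }

  // Whole-map accessors, replacing direct reads of inputs_/outputs_/attrs_.
  const VarNameMap& Inputs() const { return inputs_; }
  const VarNameMap& Outputs() const { return outputs_; }
  const AttributeMap& Attrs() const { return attrs_; }

  // Keyed accessor, replacing inputs_.at(name) in calling code.
  const std::vector<std::string>& Inputs(const std::string& name) const {
    return inputs_.at(name);
  }

 private:
  std::string type_;
  VarNameMap inputs_;
  VarNameMap outputs_;
  AttributeMap attrs_;
};

int main() {
  MyOp op;
  op.SetType("plain_net");
  assert(op.Type() == "plain_net");
  assert(op.Inputs().empty() && op.Outputs().empty() && op.Attrs().empty());
  return 0;
}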
paddle/framework/grad_op_builder.cc

@@ -23,7 +23,7 @@ static void TransOpArg(const OperatorBase* src_op,
                        OperatorBase::VarNameMap* vars,
                        const OpArgType& src_type, bool is_grad) {
   const auto& src_inout =
-      src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
+      src_type == OpArgType::IN ? src_op->Inputs() : src_op->Outputs();
   auto& dst_inout = *vars;
   const OpProto& proto = OpProtos().at(src_op->Type());
@@ -39,13 +39,12 @@ static void TransOpArg(const OperatorBase* src_op,
       dst_inout[dst_name].emplace_back(s);
     }
   }
-  return dst_inout;
 }

 OperatorBase* BuildGradOp(const OperatorBase* op) {
-  auto gop_type_it = OpRegistry::grad_ops().find(op->type_);
+  auto gop_type_it = OpRegistry::grad_ops().find(op->Type());
   PADDLE_ENFORCE(gop_type_it != OpRegistry::grad_ops().end(),
-                 "Operator %s do not register gradient type", op->type_);
+                 "Operator %s do not register gradient type", op->Type());
   auto& grad_op_type = gop_type_it->second;
   OperatorBase::VarNameMap inputs;
   OperatorBase::VarNameMap outputs;
@@ -56,9 +55,9 @@ OperatorBase* BuildGradOp(const OperatorBase* op) {
   auto gop_it = OpRegistry::op_creators().find(grad_op_type);
   PADDLE_ENFORCE(gop_it != OpRegistry::op_creators().end(),
                  "Operator %s 's Gradient %s's creator cannot be found",
-                 op->type_, grad_op_type);
+                 op->Type(), grad_op_type);

-  return gop_it->second(grad_op_type, inputs, outputs, op->attrs_);
+  return gop_it->second(grad_op_type, inputs, outputs, op->Attrs());
 }

 }  // namespace framework
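BuildGradOp above performs two registry lookups keyed by the operator type: the forward type resolves to a gradient-op type, which in turn resolves to a creator function. A standalone sketch of that lookup pattern is below; the names grad_ops, op_creators, Enforce and DummyOp are illustrative stand-ins, not Paddle's actual API.

#include <functional>
#include <map>
#include <memory>
#include <stdexcept>
#include <string>

struct DummyOp {
  std::string type;
};

using Creator = std::function<std::unique_ptr<DummyOp>(const std::string&)>;

// Map from forward-op type to gradient-op type (toy registry).
static std::map<std::string, std::string>& grad_ops() {
  static std::map<std::string, std::string> m{{"add_two", "add_two_grad"}};
  return m;
}

// Map from op type to a creator function (toy registry).
static std::map<std::string, Creator>& op_creators() {
  static std::map<std::string, Creator> m{
      {"add_two_grad", [](const std::string& type) {
         return std::unique_ptr<DummyOp>(new DummyOp{type});
       }}};
  return m;
}

// Stand-in for PADDLE_ENFORCE: fail loudly when a lookup misses.
static void Enforce(bool cond, const std::string& msg) {
  if (!cond) throw std::runtime_error(msg);
}

std::unique_ptr<DummyOp> BuildGrad(const DummyOp& op) {
  auto type_it = grad_ops().find(op.type);
  Enforce(type_it != grad_ops().end(), "no gradient registered for " + op.type);
  auto creator_it = op_creators().find(type_it->second);
  Enforce(creator_it != op_creators().end(),
          "no creator registered for " + type_it->second);
  return creator_it->second(type_it->second);
}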
paddle/framework/grad_op_builder_test.cc

@@ -52,8 +52,8 @@ TEST(GradOpBuilder, AddTwo) {
       "add_two", {{"X", {"x"}}, {"Y", {"y"}}}, {{"Out", {"out"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_add_op =
       f::OpRegistry::CreateGradOp(*add_op);
-  EXPECT_EQ(grad_add_op->inputs_.size(), 4UL);
-  EXPECT_EQ(grad_add_op->outputs_.size(), 2UL);
+  EXPECT_EQ(grad_add_op->Inputs().size(), 4UL);
+  EXPECT_EQ(grad_add_op->Outputs().size(), 2UL);
   EXPECT_EQ(grad_add_op->Input("X"), "x");
   EXPECT_EQ(grad_add_op->Input("Y"), "y");
   EXPECT_EQ(grad_add_op->Input("Out"), "out");
@@ -76,7 +76,7 @@ TEST(GradOpBuilder, MutiInOut) {
   std::shared_ptr<f::OperatorBase> grad_test_op =
       f::OpRegistry::CreateGradOp(*test_op);
-  ASSERT_EQ(grad_test_op->inputs_.size(), 3UL + 2UL + 2UL);
+  ASSERT_EQ(grad_test_op->Inputs().size(), 3UL + 2UL + 2UL);
   EXPECT_EQ(grad_test_op->Input("In1"), "in1");
   EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
             std::vector<std::string>({"in2_1", "in2_2", "in2_3"}));
@@ -90,7 +90,7 @@ TEST(GradOpBuilder, MutiInOut) {
             std::vector<std::string>(
                 {f::GradVarName("out2_1"), f::GradVarName("out2_2")}));
-  ASSERT_EQ(grad_test_op->outputs_.size(), 3UL);
+  ASSERT_EQ(grad_test_op->Outputs().size(), 3UL);
   EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")),
             f::GradVarName("in1"));
   EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
             std::vector<std::string>({f::GradVarName("in2_1"),
@@ -109,7 +109,7 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
       f::OpRegistry::CreateGradOp(*test_op);

   // 'In2' and 'Out2' are ignored in gradient calculating
-  ASSERT_EQ(grad_test_op->inputs_.size(), 2UL + 1UL + 2UL);
+  ASSERT_EQ(grad_test_op->Inputs().size(), 2UL + 1UL + 2UL);
   EXPECT_EQ(grad_test_op->Input("In1"), "in1");
   EXPECT_EQ(grad_test_op->Inputs("In3_mult"),
             std::vector<std::string>({"in3_1", "in3_2"}));
@@ -121,7 +121,7 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
   EXPECT_EQ(grad_test_op->Input(f::GradVarName("Out2")),
             f::GradVarName("out2"));
-  ASSERT_EQ(grad_test_op->outputs_.size(), 3UL);
+  ASSERT_EQ(grad_test_op->Outputs().size(), 3UL);
   EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")),
             f::GradVarName("in1"));
   EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
             std::vector<std::string>(
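The MutiInOut and IOIgnoredInGradient cases above compare gradient inputs and outputs against f::GradVarName(...) of the forward variable names. As a rough mental model, GradVarName can be read as appending a fixed gradient suffix to a name; the sketch below uses a placeholder suffix, since the actual constant is not visible in this diff.

#include <string>

// Hypothetical stand-in for f::GradVarName: derive a gradient-variable name
// from a forward-variable name by appending a fixed suffix. The suffix below
// is a placeholder assumption; the real constant lives elsewhere in the framework.
inline std::string GradVarName(const std::string& name) {
  const std::string kGradSuffix = "@GRAD";  // placeholder suffix (assumption)
  return name + kGradSuffix;
}
// e.g. GradVarName("in2_1") would yield "in2_1@GRAD" under this assumption.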
paddle/framework/pybind.cc

@@ -53,15 +53,15 @@ void ExposeOperator(ClassType &m) {
       .def("run", &ClassType::type::Run)
       .def("type",
            [](const typename ClassType::type &op) -> std::string {
-             return op.type_;
+             return op.Type();
            })
       .def("outputs",
            [](const typename ClassType::type &op)
                -> std::map<std::string, std::vector<std::string>> {
-                 return op.outputs_;
+                 return op.Outputs();
               })
       .def("inputs",
-           [](const typename ClassType::type &op) { return op.inputs_; })
+           [](const typename ClassType::type &op) { return op.Inputs(); })
       .def("__str__", &ClassType::type::DebugString)
       .def("no_intermediate_outputs",
            [](const typename ClassType::type &op) {
@@ -229,7 +229,7 @@ All parameter, weight, gradient are variables in Paddle.
   net.def_static("create",
                  []() -> std::shared_ptr<operators::NetOp> {
                    auto retv = std::make_shared<operators::NetOp>();
-                   retv->type_ = "plain_net";
+                   retv->SetType("plain_net");
                    return retv;
                  })
       .def("add_op", &operators::NetOp::AddOp)
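The pybind.cc change keeps the Python-facing method names ("type", "inputs", "outputs") while the wrapping lambdas now call the C++ accessors instead of reading the fields. A minimal, self-contained sketch of this binding style follows, assuming pybind11 and using current PYBIND11_MODULE conventions (not necessarily those of this source tree); the Op class and the module name "example" are hypothetical.

#include <pybind11/pybind11.h>
#include <pybind11/stl.h>

#include <map>
#include <string>
#include <vector>

namespace py = pybind11;

// Hypothetical operator-like class exposing accessors rather than raw fields.
class Op {
 public:
  const std::string &Type() const { return type_; }
  void SetType(const std::string &type) { type_ = type; }
  const std::map<std::string, std::vector<std::string>> &Outputs() const {
    return outputs_;
  }

 private:
  std::string type_{"plain_net"};
  std::map<std::string, std::vector<std::string>> outputs_;
};

PYBIND11_MODULE(example, m) {
  py::class_<Op>(m, "Op")
      .def(py::init<>())
      // Wrap accessor calls in lambdas, as the diff above does for type_/outputs_.
      .def("type", [](const Op &op) -> std::string { return op.Type(); })
      .def("outputs",
           [](const Op &op) -> std::map<std::string, std::vector<std::string>> {
             return op.Outputs();
           })
      .def("set_type", &Op::SetType);
}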
paddle/operators/net_op_test.cc

@@ -56,8 +56,8 @@ TEST(OpKernel, all) {
   net->CompleteAddOp();
   AssertSameVectorWithoutOrder({"x", "w1", "b1", "w2", "b2"},
-                               net->inputs_.at(NetOp::kAll));
-  AssertSameVectorWithoutOrder({"y", "z"}, net->outputs_.at(NetOp::kAll));
+                               net->Inputs(NetOp::kAll));
+  AssertSameVectorWithoutOrder({"y", "z"}, net->Outputs(NetOp::kAll));

   auto final_outs = net->OutputVars(false);
paddle/operators/recurrent_op.cc

@@ -82,14 +82,14 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope) const {
   PADDLE_ENFORCE(net_var != nullptr, "no stepnet called %s in scope",
                  arg_->step_net);
   auto net_op = net_var->GetMutable<NetOp>();
-  PADDLE_ENFORCE(!net_op->outputs_.empty(), "net_op has no outputs");
+  PADDLE_ENFORCE(!net_op->Outputs().empty(), "net_op has no outputs");

   if (seq_len_ > step_scopes->size()) {
     for (size_t i = step_scopes->size(); i < seq_len_; ++i) {
       auto& step_scope = scope.NewScope();

       // create step net's temp inputs
-      for (auto& input : net_op->inputs_) {
+      for (auto& input : net_op->Inputs()) {
         // the weight are located in parent scope
         for (auto& var_name : input.second) {
           if (!step_scope.FindVar(var_name)) {
@@ -98,7 +98,7 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope) const {
         }
       }
       // create stepnet's outputs
-      for (const auto& output : net_op->outputs_) {
+      for (const auto& output : net_op->Outputs()) {
         for (auto& var_name : output.second) {
           step_scope.NewVar(var_name);
         }
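CreateScopes above walks every variable name listed in the step net's Inputs() and Outputs(). Assuming those accessors return a map from parameter name to a list of variable names, the traversal reduces to the nested loops sketched here; the VarNameMap alias and the CollectVarNames helper are illustrative names only, not part of the framework.

#include <map>
#include <set>
#include <string>
#include <vector>

using VarNameMap = std::map<std::string, std::vector<std::string>>;

// Visit every variable name mentioned in the input and output maps,
// mirroring the two range-for loops over net_op->Inputs()/Outputs() above.
std::set<std::string> CollectVarNames(const VarNameMap& inputs,
                                      const VarNameMap& outputs) {
  std::set<std::string> names;
  for (const auto& input : inputs) {
    for (const auto& var_name : input.second) names.insert(var_name);
  }
  for (const auto& output : outputs) {
    for (const auto& var_name : output.second) names.insert(var_name);
  }
  return names;
}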