BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 7f9af125
Authored Aug 17, 2017 by Luo Tao

    Merge branch 'develop' into huber_loss

Parents: cbad985b, 766299b8

Showing 12 changed files with 132 additions and 160 deletions (+132, -160).
paddle/framework/backward.cc                            +20  -22
paddle/framework/backward.h                              +1   -1
paddle/framework/backward_test.cc                        +1   -2
paddle/framework/op_registry.cc                          +5   -6
paddle/framework/op_registry.h                           +3   -3
paddle/framework/op_registry_test.cc                     +2   -4
paddle/framework/pybind.cc                              +56  -85
paddle/memory/memory.cc                                  +1   -0
paddle/operators/net_op.h                               +17  -13
paddle/operators/net_op_test.cc                         +10  -13
paddle/operators/recurrent_op.h                         +16  -10
python/paddle/v2/framework/tests/gradient_checker.py     +0   -1
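
Every hunk below makes the same kind of change: operator factories and containers migrate from std::shared_ptr<OperatorBase> to std::unique_ptr<OperatorBase>, so each operator has exactly one owner and every transfer of ownership is explicit. A minimal sketch of the resulting discipline, using stand-in types rather than Paddle's real headers:

#include <memory>
#include <utility>
#include <vector>

struct OperatorBase {
  virtual ~OperatorBase() = default;
};

struct NetOp : OperatorBase {
  std::vector<std::unique_ptr<OperatorBase>> ops_;
  // Taking unique_ptr by value makes every ownership transfer explicit:
  // callers must pass a temporary or write std::move at the call site.
  void AddOp(std::unique_ptr<OperatorBase> op) { ops_.push_back(std::move(op)); }
};

std::unique_ptr<OperatorBase> MakeNop() {
  return std::unique_ptr<OperatorBase>(new NetOp());  // pre-make_unique style, as in the diff
}

int main() {
  NetOp net;
  net.AddOp(MakeNop());      // ownership moves straight into net.ops_
  auto op = MakeNop();
  net.AddOp(std::move(op));  // explicit transfer; op is now null
}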

paddle/framework/backward.cc

@@ -15,6 +15,8 @@
 #include "paddle/framework/backward.h"
+#include <list>
+#include <memory>
 #include "paddle/framework/op_registry.h"
 #include "paddle/operators/net_op.h"
 #include "paddle/operators/recurrent_op.h"

@@ -43,11 +45,11 @@ static bool AllInSet(
   return all_in_set;
 }

-static std::shared_ptr<OperatorBase> NOP() {
-  auto net_op = std::make_shared<operators::NetOp>();
+static std::unique_ptr<OperatorBase> NOP() {
+  auto net_op = new operators::NetOp();
   net_op->SetType("@NOP@");
   net_op->CompleteAddOp();
-  return net_op;
+  return std::unique_ptr<OperatorBase>(net_op);
 }

 //  Get backward operator from a forward operator, a recursive implementation.

@@ -62,11 +64,7 @@ static std::shared_ptr<OperatorBase> NOP() {
 //  operator, in a complex situation, it maybe a NetOp.
 //
 //  See Backward.h for details
-static std::shared_ptr<OperatorBase> BackwardRecursive(
-    const OperatorBase& forwardOp,
-    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id);
-std::shared_ptr<OperatorBase> BackwardRecursive(
+static std::unique_ptr<OperatorBase> BackwardRecursive(
     const OperatorBase& forwardOp,
     std::unordered_set<std::string>& no_grad_names, size_t& uniq_id) {
   // If all input gradients of forwarding operator do not need to calculate,

@@ -91,7 +89,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
   }

   // Returned gradient network
-  auto net = std::make_shared<operators::NetOp>();
+  auto net = std::unique_ptr<operators::NetOp>(new operators::NetOp());

   if (forwardOp.IsNetOp()) {
     // Because forwardOp is a net op, it can static_cast.

@@ -105,14 +103,14 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
     // reversely travel forwardNet and collect all duplicate outputs.
     for (auto it = forwardNet.ops_.rbegin(); it != forwardNet.ops_.rend();
          ++it, ++local_op_id) {
-      auto fwd = *it;
+      auto& fwd = *it;
       auto bwd = BackwardRecursive(*fwd, no_grad_names, uniq_id);
-      net->AddOp(bwd);
       ForEachVarName(bwd->Outputs(),
                      [&dup_output_ops, local_op_id](const std::string& out) {
                        dup_output_ops[out].emplace_back(local_op_id);
                        return false;
                      });
+      net->AddOp(std::move(bwd));
     }
     // Get unique ID for this method.
     auto uid = uniq_id++;

@@ -122,7 +120,7 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
     // to handle this case. For each duplicate output, rename it to an alias
     // (original name with a offset), append an `add` op for its operator,
     // and finally sum all the alias variable to the final output variable y.
-    using Pos = std::pair<size_t, std::shared_ptr<OperatorBase>>;
+    using Pos = std::pair<size_t, std::unique_ptr<OperatorBase>>;
     std::list<Pos> insert_position;
     for (auto& dup_output_op : dup_output_ops) {
       const std::string& name = dup_output_op.first;

@@ -150,13 +148,13 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
         [](const Pos& l, const Pos& r) { return l.first > r.first; });

     for (auto& pos : insert_position) {
-      net->InsertOp(pos.first + 1, pos.second);
+      net->InsertOp(pos.first + 1, std::move(pos.second));
     }
   } else {
-    std::shared_ptr<OperatorBase> grad_op = OpRegistry::CreateGradOp(forwardOp);
-    ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net,
-                                       grad_op](const std::string& grad_input) {
+    std::unique_ptr<OperatorBase> grad_op(OpRegistry::CreateGradOp(forwardOp));
+    ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net,
+                                       &grad_op](const std::string& grad_input) {
       if (no_grad_names.count(grad_input)) {
         // +1 for \0
         std::string prefix = grad_input.substr(

@@ -190,23 +188,23 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
     const auto& stepnet_op =
         *static_cast<const OperatorBase*>(&rnnop.stepnet());
     // create stepnet's gradient op
-    auto grad_stepnet = BackwardRecursive(stepnet_op, no_grad_names, uniq_id);
     rnn_grad_op->set_stepnet(
-        std::static_pointer_cast<operators::NetOp>(grad_stepnet));
+        BackwardRecursive(stepnet_op, no_grad_names, uniq_id));
   }

   if (net->ops_.empty()) {  // Current no aux op is added to network
     return grad_op;
   }
-  net->AddOp(grad_op);
+  net->AddOp(std::move(grad_op));
   }
   net->SetType("@GENERATED_BACKWARD@");
   net->CompleteAddOp();
-  return net;
-}  // namespace framework
+  return std::unique_ptr<OperatorBase>(
+      static_cast<OperatorBase*>(net.release()));
+}

 // See header for comments
-std::shared_ptr<OperatorBase> Backward(
+std::unique_ptr<OperatorBase> Backward(
     const OperatorBase& forwardOp,
     const std::unordered_set<std::string>& no_grad_vars) {
   std::unordered_set<std::string> no_grad_names;
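
Note the new return at the end of BackwardRecursive: the unique_ptr<operators::NetOp> is release()d and rewrapped as unique_ptr<OperatorBase>. A derived-to-base move conversion achieves the same effect; a small sketch with stand-in types:

#include <memory>
#include <utility>

struct OperatorBase { virtual ~OperatorBase() = default; };
struct NetOp : OperatorBase {};

// The form used in the diff: release the derived pointer, rewrap as base.
std::unique_ptr<OperatorBase> ViaRelease() {
  auto net = std::unique_ptr<NetOp>(new NetOp());
  return std::unique_ptr<OperatorBase>(static_cast<OperatorBase*>(net.release()));
}

// Equivalent: unique_ptr<Derived> move-converts to unique_ptr<Base>;
// deletion through the base pointer is safe because ~OperatorBase is virtual.
std::unique_ptr<OperatorBase> ViaConversion() {
  auto net = std::unique_ptr<NetOp>(new NetOp());
  return std::move(net);
}

int main() {
  auto a = ViaRelease();
  auto b = ViaConversion();
}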

paddle/framework/backward.h

@@ -20,7 +20,7 @@ namespace framework {

 // Create the backward operator from a forward operator.
 // TODO(yuyang18): Add more API reference comment.
-extern std::shared_ptr<OperatorBase> Backward(
+extern std::unique_ptr<OperatorBase> Backward(
     const OperatorBase& forwardOp,
     const std::unordered_set<std::string>& no_grad_vars);
 }  // namespace framework

paddle/framework/backward_test.cc

@@ -180,8 +180,7 @@ TEST(Backward, simple_op_not_need_grad) {
   auto no_input_gop = f::Backward(*fwd, {"x", "b"});
   ASSERT_NE(no_input_gop, nullptr);
   ASSERT_TRUE(no_input_gop->IsNetOp());
-  ASSERT_EQ(0UL,
-            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
+  ASSERT_EQ(0UL, static_cast<ops::NetOp*>(no_input_gop.get())->ops_.size());
 }

 TEST(Backward, net_fc_backward_normal) {

paddle/framework/op_registry.cc

@@ -19,7 +19,7 @@ limitations under the License. */
 namespace paddle {
 namespace framework {

-std::shared_ptr<OperatorBase> OpRegistry::CreateOp(const std::string& type,
+std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const std::string& type,
                                                    const VarNameMap& inputs,
                                                    const VarNameMap& outputs,
                                                    AttributeMap attrs) {

@@ -28,10 +28,10 @@ std::shared_ptr<OperatorBase> OpRegistry::CreateOp(const std::string& type,
                  "Operator '%s' has not been registered.", type);
   it->second.checker_->Check(attrs);
   auto op = it->second.creator_(type, inputs, outputs, attrs);
-  return std::shared_ptr<OperatorBase>(op);
+  return std::unique_ptr<OperatorBase>(op);
 }

-std::shared_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
+std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
   VarNameMap inputs = ConvertOpDescVarsToVarNameMap(op_desc.inputs());
   VarNameMap outputs = ConvertOpDescVarsToVarNameMap(op_desc.outputs());
   AttributeMap attrs;

@@ -55,10 +55,9 @@ OperatorBase::VarNameMap OpRegistry::ConvertOpDescVarsToVarNameMap(
   return ret_val;
 }

-std::shared_ptr<OperatorBase> OpRegistry::CreateGradOp(const OperatorBase& op) {
+std::unique_ptr<OperatorBase> OpRegistry::CreateGradOp(const OperatorBase& op) {
   PADDLE_ENFORCE(!op.IsNetOp(), "Use framework::Backward to get backward ops");
-  std::shared_ptr<OperatorBase> grad_op(BuildGradOp(&op));
-  return grad_op;
+  return std::unique_ptr<OperatorBase>(BuildGradOp(&op));
 }

 }  // namespace framework
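
A unique_ptr return is also the more permissive factory signature: a caller that wants shared ownership can still move-convert the result, while a shared_ptr return could never hand exclusive ownership back. A sketch with a hypothetical factory mirroring the new CreateOp return type:

#include <memory>

struct OperatorBase { virtual ~OperatorBase() = default; };
struct ScaleOp : OperatorBase {};

// Hypothetical stand-in for OpRegistry::CreateOp's new return type.
std::unique_ptr<OperatorBase> CreateOp() {
  return std::unique_ptr<OperatorBase>(new ScaleOp());
}

int main() {
  std::unique_ptr<OperatorBase> unique = CreateOp();  // exclusive ownership
  std::shared_ptr<OperatorBase> shared = CreateOp();  // move-converts on demand
}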

paddle/framework/op_registry.h

@@ -77,17 +77,17 @@ class OpRegistry {
     }
   }

-  static std::shared_ptr<OperatorBase> CreateOp(const std::string& type,
+  static std::unique_ptr<OperatorBase> CreateOp(const std::string& type,
                                                 const VarNameMap& inputs,
                                                 const VarNameMap& outputs,
                                                 AttributeMap attrs);

-  static std::shared_ptr<OperatorBase> CreateOp(const OpDesc& op_desc);
+  static std::unique_ptr<OperatorBase> CreateOp(const OpDesc& op_desc);

   static VarNameMap ConvertOpDescVarsToVarNameMap(
       const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars);

-  static std::shared_ptr<OperatorBase> CreateGradOp(const OperatorBase& op);
+  static std::unique_ptr<OperatorBase> CreateGradOp(const OperatorBase& op);

   static std::unordered_map<std::string, const OpInfo>& op_info_map() {
     static std::unordered_map<std::string, const OpInfo> op_info_map_;

paddle/framework/op_registry_test.cc

@@ -76,8 +76,7 @@ TEST(OpRegistry, CreateOp) {
     attr->set_type(paddle::framework::AttrType::FLOAT);
     attr->set_f(scale);

-    std::shared_ptr<paddle::framework::OperatorBase> op =
-        paddle::framework::OpRegistry::CreateOp(op_desc);
+    auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
     paddle::framework::Scope scope;
     paddle::platform::CPUDeviceContext dev_ctx;
     op->Run(scope, dev_ctx);

@@ -118,8 +117,7 @@ TEST(OpRegistry, DefaultValue) {
   ASSERT_TRUE(op_desc.IsInitialized());

-  std::shared_ptr<paddle::framework::OperatorBase> op =
-      paddle::framework::OpRegistry::CreateOp(op_desc);
+  auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
   paddle::framework::Scope scope;
   paddle::platform::CPUDeviceContext dev_ctx;
   op->Run(scope, dev_ctx);

paddle/framework/pybind.cc

@@ -48,29 +48,6 @@ namespace framework {
 using Tensor = framework::Tensor;

-template <typename ClassType>
-void ExposeOperator(ClassType& m) {
-  m.def("infer_shape", &ClassType::type::InferShape)
-      .def("run", &ClassType::type::Run)
-      .def("type",
-           [](const typename ClassType::type& op) -> std::string {
-             return op.Type();
-           })
-      .def("outputs",
-           [](const typename ClassType::type& op)
-               -> std::map<std::string, std::vector<std::string>> {
-                 return op.Outputs();
-               })
-      .def("inputs",
-           [](const typename ClassType::type& op) { return op.Inputs(); })
-      .def("__str__", &ClassType::type::DebugString)
-      .def("no_intermediate_outputs",
-           [](const typename ClassType::type& op) {
-             return op.OutputVars(false);
-           })
-      .def("support_gpu", &ClassType::type::SupportGPU);
-}
-
 static size_t UniqueIntegerGenerator() {
   static std::atomic<size_t> generator;
   return generator.fetch_add(1);

@@ -207,75 +184,69 @@ All parameter, weight, gradient are variables in Paddle.
       .def(py::init<>())
       .def("__str__", string::to_string<const platform::CPUPlace&>);

-  py::class_<OperatorBase, std::shared_ptr<OperatorBase>> operator_base(
-      m, "Operator");
-
-  operator_base.def_static("create", [](py::bytes protobin) {
-    OpDesc desc;
-    PADDLE_ENFORCE(desc.ParsePartialFromString(protobin),
-                   "Cannot parse user input to OpDesc");
-    PADDLE_ENFORCE(desc.IsInitialized(),
-                   "User OpDesc is not initialized, reason %s",
-                   desc.InitializationErrorString());
-    return OpRegistry::CreateOp(desc);
-  });
-
-  operator_base.def("backward",
-                    [](const OperatorBase& forwardOp,
-                       const std::unordered_set<std::string>& no_grad_vars) {
-                      return Backward(forwardOp, no_grad_vars);
-                    });
-
-  ExposeOperator(operator_base);
-
-  py::class_<operators::NetOp, std::shared_ptr<operators::NetOp>> net(m, "Net");
-
-  net.def_static("create",
-                 []() -> std::shared_ptr<operators::NetOp> {
-                   auto retv = std::make_shared<operators::NetOp>();
-                   retv->SetType("plain_net");
-                   return retv;
-                 })
-      .def("add_op", &operators::NetOp::AddOp)
-      .def("add_op",
-           [](operators::NetOp& self,
-              const std::shared_ptr<operators::NetOp>& net) -> void {
-             self.AddOp(std::static_pointer_cast<OperatorBase>(net));
-           })
-      .def("add_op",
-           [](operators::NetOp& self,
-              const std::shared_ptr<operators::RecurrentOp>& rnn) -> void {
-             self.AddOp(std::static_pointer_cast<OperatorBase>(rnn));
+  py::class_<OperatorBase>(m, "Operator")
+      .def_static("create",
+                  [](py::bytes protobin) {
+                    OpDesc desc;
+                    PADDLE_ENFORCE(desc.ParsePartialFromString(protobin),
+                                   "Cannot parse user input to OpDesc");
+                    PADDLE_ENFORCE(desc.IsInitialized(),
+                                   "User OpDesc is not initialized, reason %s",
+                                   desc.InitializationErrorString());
+                    return OpRegistry::CreateOp(desc);
+                  })
+      .def("backward",
+           [](const OperatorBase& forwardOp,
+              const std::unordered_set<std::string>& no_grad_vars) {
+             return Backward(forwardOp, no_grad_vars).release();
           })
+      .def("infer_shape", &OperatorBase::InferShape)
+      .def("run", &OperatorBase::Run)
+      .def("type",
+           [](const OperatorBase& op) -> std::string { return op.Type(); })
+      .def("outputs",
+           [](const OperatorBase& op)
+               -> std::map<std::string, std::vector<std::string>> {
+                 return op.Outputs();
+               })
+      .def("inputs", [](const OperatorBase& op) { return op.Inputs(); })
+      .def("__str__", &OperatorBase::DebugString)
+      .def("no_intermediate_outputs",
+           [](const OperatorBase& op) { return op.OutputVars(false); })
+      .def("support_gpu", &OperatorBase::SupportGPU);
+
+  py::class_<operators::NetOp, OperatorBase>(m, "Net")
+      .def_static("create",
+                  []() -> operators::NetOp* {
+                    auto* retv = new operators::NetOp;
+                    retv->SetType("plain_net");
+                    return retv;
+                  })
+      .def("add_op", [](operators::NetOp& self, const OperatorBase& op) {
+        self.AddOp(op);
+      })
       .def("complete_add_op", &operators::NetOp::CompleteAddOp)
       .def("complete_add_op", [](std::shared_ptr<operators::NetOp>& self) {
         self->CompleteAddOp();
       });
-  ExposeOperator(net);

   // recurrent_op
-  py::class_<operators::RecurrentOp, std::shared_ptr<operators::RecurrentOp>>
-      rnn(m, "RecurrentOp");
-
-  rnn.def_static(
-         "create",
-         [](py::bytes protobin) -> std::shared_ptr<operators::RecurrentOp> {
-           OpDesc desc;
-           PADDLE_ENFORCE(desc.ParsePartialFromString(protobin),
-                          "Cannot parse user input to OpDesc");
-           PADDLE_ENFORCE(desc.IsInitialized(),
-                          "User OpDesc is not initialized, reason %s",
-                          desc.InitializationErrorString());
-           auto rnn_op = OpRegistry::CreateOp(desc);
-           return std::dynamic_pointer_cast<operators::RecurrentOp>(rnn_op);
-         })
-      .def("set_stepnet",
-           [](operators::RecurrentOp& self,
-              const std::shared_ptr<operators::NetOp>& net) -> void {
-             self.set_stepnet(net);
-           });
-  ExposeOperator(rnn);
+  py::class_<operators::RecurrentOp, OperatorBase>(m, "RecurrentOp")
+      .def_static(
+          "create",
+          [](py::bytes protobin) -> operators::RecurrentOp* {
+            OpDesc desc;
+            PADDLE_ENFORCE(desc.ParsePartialFromString(protobin),
+                           "Cannot parse user input to OpDesc");
+            PADDLE_ENFORCE(desc.IsInitialized(),
+                           "User OpDesc is not initialized, reason %s",
+                           desc.InitializationErrorString());
+            auto rnn_op = OpRegistry::CreateOp(desc);
+            return static_cast<operators::RecurrentOp*>(rnn_op.release());
+          })
+      .def("set_stepnet",
+           [](operators::RecurrentOp& self, const operators::NetOp& net)
+               -> void { self.set_stepnet(net.Clone()); });

   m.def("unique_integer", UniqueIntegerGenerator);
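
The pybind rewrite drops the ExposeOperator template and the shared_ptr holder classes: NetOp and RecurrentOp now declare OperatorBase as their pybind base, so every operator method is bound once on Operator and inherited, and factories hand Python a raw pointer (or a unique_ptr released to one), which pybind11's default return-value policy treats as a transfer of ownership. A stripped-down sketch, assuming pybind11 and illustrative types rather than the real Paddle module:

#include <pybind11/pybind11.h>
namespace py = pybind11;

struct OperatorBase {
  virtual ~OperatorBase() = default;
};
struct NetOp : OperatorBase {};

PYBIND11_MODULE(ops_sketch, m) {
  // Methods bound on the base class are inherited by every subclass below.
  py::class_<OperatorBase>(m, "Operator");
  // Declaring OperatorBase as the pybind base replaces the old
  // shared_ptr-holder-plus-ExposeOperator duplication.
  py::class_<NetOp, OperatorBase>(m, "Net")
      // Returning a raw pointer: pybind11's automatic return-value policy
      // takes ownership, matching the `new operators::NetOp` factory above.
      .def_static("create", []() -> NetOp* { return new NetOp; });
}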

paddle/memory/memory.cc

@@ -16,6 +16,7 @@ limitations under the License. */
 #include <algorithm>  // for transform
 #include <cstring>    // for memcpy
+#include <memory>     // for unique_ptr
 #include <mutex>      // for call_once
 #include "paddle/memory/detail/buddy_allocator.h"

paddle/operators/net_op.h

@@ -41,15 +41,13 @@ class NetOp : public framework::OperatorBase {
   NetOp(const std::string& type, const VarNameMap& inputs,
         const VarNameMap& outputs, const framework::AttributeMap& attrs);

-  NetOp(const NetOp& o)
-      : framework::OperatorBase(
-            static_cast<const framework::OperatorBase&>(o)) {
+  NetOp(const NetOp& o) : framework::OperatorBase(o.type_, {}, {}, o.attrs_) {
     this->ops_.reserve(o.ops_.size());
-    std::transform(o.ops_.begin(), o.ops_.end(), std::back_inserter(this->ops_),
-                   [](const std::shared_ptr<OperatorBase>& op)
-                       -> std::shared_ptr<OperatorBase> {
-                     return std::shared_ptr<OperatorBase>(op->Clone());
-                   });
+    std::transform(
+        o.ops_.begin(), o.ops_.end(), std::back_inserter(this->ops_),
+        [](const std::unique_ptr<framework::OperatorBase>& op) {
+          return std::unique_ptr<framework::OperatorBase>(op->Clone());
+        });
     this->CompleteAddOp();
   }

@@ -86,21 +84,27 @@ class NetOp : public framework::OperatorBase {
     return true;
   }

+  void AddOp(const framework::OperatorBase& op) { AddOp(op.Clone()); }
+
   /**
    * @brief Add an operator by ptr
    */
-  void AddOp(const std::shared_ptr<OperatorBase>& op) {
+  void AddOp(std::unique_ptr<framework::OperatorBase> op) {
     PADDLE_ENFORCE(!add_op_done_, "Cannot AddOp when this network is sealed");
     PADDLE_ENFORCE_NOT_NULL(op, "Cannot Insert Null op");
-    ops_.push_back(op);
+    ops_.push_back(std::move(op));
   }

-  void InsertOp(size_t pos, const std::shared_ptr<OperatorBase>& op) {
+  void InsertOp(size_t pos, std::unique_ptr<framework::OperatorBase> op) {
     PADDLE_ENFORCE(!add_op_done_,
                    "Cannot InsertOp when this network is sealed");
     PADDLE_ENFORCE_NOT_NULL(op, "Cannot Insert Null op");
     PADDLE_ENFORCE_LE(pos, ops_.size(), "Out of range");
-    ops_.insert(ops_.begin() + pos, op);
+    ops_.insert(ops_.begin() + pos, std::move(op));
   }

+  void InsertOp(size_t pos, const framework::OperatorBase& op) {
+    InsertOp(pos, op.Clone());
+  }
+
   void CompleteAddOp(bool calculate = true);

@@ -112,7 +116,7 @@ class NetOp : public framework::OperatorBase {
   std::unique_ptr<framework::OperatorBase> Clone() const override;

-  std::vector<std::shared_ptr<OperatorBase>> ops_;
+  std::vector<std::unique_ptr<framework::OperatorBase>> ops_;

  private:
   bool add_op_done_{false};
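
With ops_ holding unique_ptr, NetOp's copy constructor can no longer copy elements; it clones each op into a fresh unique_ptr. The std::transform + back_inserter idiom in isolation, with stand-in types:

#include <algorithm>
#include <iterator>
#include <memory>
#include <vector>

struct Op {
  virtual ~Op() = default;
  virtual std::unique_ptr<Op> Clone() const = 0;
};

struct NopOp : Op {
  std::unique_ptr<Op> Clone() const override {
    return std::unique_ptr<Op>(new NopOp(*this));
  }
};

// Deep-copies a vector of unique_ptr by cloning every element, mirroring
// the lambda in NetOp's rewritten copy constructor.
std::vector<std::unique_ptr<Op>> DeepCopy(const std::vector<std::unique_ptr<Op>>& src) {
  std::vector<std::unique_ptr<Op>> dst;
  dst.reserve(src.size());
  std::transform(src.begin(), src.end(), std::back_inserter(dst),
                 [](const std::unique_ptr<Op>& op) { return op->Clone(); });
  return dst;
}

int main() {
  std::vector<std::unique_ptr<Op>> src;
  src.push_back(std::unique_ptr<Op>(new NopOp));
  auto dst = DeepCopy(src);  // dst holds independent clones of src's ops
}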

paddle/operators/net_op_test.cc

@@ -38,15 +38,12 @@ TEST(OpKernel, all) {
   auto net = std::make_shared<NetOp>();
   ASSERT_NE(net, nullptr);

-  auto op1 = std::shared_ptr<TestOp>(
-      new TestOp("test", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
-                 {{"Out", {"y"}}}, {}));
-  net->AddOp(op1);
-
-  auto op2 = std::shared_ptr<TestOp>(
-      new TestOp("test", {{"X", {"y"}}, {"W", {"w2"}}, {"b", {"b2"}}},
-                 {{"Out", {"z"}}}, {}));
-  net->AddOp(op2);
+  net->AddOp(std::unique_ptr<TestOp>(
+      new TestOp("test", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
+                 {{"Out", {"y"}}}, {})));
+  net->AddOp(std::unique_ptr<TestOp>(
+      new TestOp("test", {{"X", {"y"}}, {"W", {"w2"}}, {"b", {"b2"}}},
+                 {{"Out", {"z"}}}, {})));

   net->CompleteAddOp();
   AssertSameVectorWithoutOrder({"x", "w1", "b1", "w2", "b2"},

@@ -61,21 +58,21 @@ TEST(OpKernel, all) {

 TEST(NetOp, insert_op) {
   NetOp net;
-  auto op1 = std::shared_ptr<framework::NOP>(
+  auto op1 = std::unique_ptr<framework::NOP>(
       new framework::NOP("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
                          {{"Out", {"y"}}}, {}));
-  net.AddOp(op1);
-  net.InsertOp(0, op1);
+  net.AddOp(*op1);
+  net.InsertOp(0, *op1);
   ASSERT_EQ(2UL, net.ops_.size());
-  net.InsertOp(2, op1);
+  net.InsertOp(2, std::move(op1));
   ASSERT_EQ(3UL, net.ops_.size());
 }

 TEST(NetOp, Clone) {
   NetOp net;
-  net.AddOp(
-      std::shared_ptr<framework::NOP>(new framework::NOP{"empty", {}, {}, {}}));
-  net.AddOp(std::shared_ptr<framework::NOP>(
-      new framework::NOP{"empty2", {}, {}, {}}));
+  net.AddOp(
+      std::unique_ptr<framework::NOP>(new framework::NOP{"empty", {}, {}, {}}));
+  net.AddOp(std::unique_ptr<framework::NOP>(
+      new framework::NOP{"empty2", {}, {}, {}}));
   net.CompleteAddOp(true);
   auto new_net_op = net.Clone();
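
The rewritten insert_op test relies on the two overload families net_op.h now provides: passing *op1 by const reference clones the op, while std::move(op1) hands over the original. Why the sizes assert 2 and then 3, in a compact stand-in:

#include <cassert>
#include <memory>
#include <utility>
#include <vector>

struct Op {
  virtual ~Op() = default;
  virtual std::unique_ptr<Op> Clone() const {
    return std::unique_ptr<Op>(new Op(*this));
  }
};

struct Net {
  std::vector<std::unique_ptr<Op>> ops_;
  void AddOp(std::unique_ptr<Op> op) { ops_.push_back(std::move(op)); }
  void AddOp(const Op& op) { AddOp(op.Clone()); }  // clone overload
};

int main() {
  Net net;
  auto op1 = std::unique_ptr<Op>(new Op);
  net.AddOp(*op1);            // clones; op1 still usable
  net.AddOp(*op1);            // clones again
  assert(net.ops_.size() == 2);
  net.AddOp(std::move(op1));  // transfers the original; op1 is now null
  assert(net.ops_.size() == 3);
}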

paddle/operators/recurrent_op.h

@@ -34,7 +34,8 @@ class RecurrentAlgorithm {
   void Run(const framework::Scope& scope,
            const platform::DeviceContext& dev_ctx) const;

-  void Init(rnn::Argument* arg, std::shared_ptr<NetOp>* stepnet) {
+  void Init(rnn::Argument* arg,
+            std::unique_ptr<framework::OperatorBase>* stepnet) {
     PADDLE_ENFORCE_NOT_NULL(stepnet, "stepnet should be set before.");
     arg_ = arg;
     stepnet_ = stepnet;

@@ -63,7 +64,7 @@ class RecurrentAlgorithm {
   void InitMemories(framework::Scope* step_scopes, bool infer_shape_mode) const;

  private:
-  std::shared_ptr<NetOp>* stepnet_;
+  std::unique_ptr<framework::OperatorBase>* stepnet_;
   rnn::Argument* arg_;
   mutable size_t seq_len_;
 };

@@ -80,7 +81,8 @@ class RecurrentGradientAlgorithm {
    * operator.
    */
  public:
-  void Init(rnn::Argument* arg, std::shared_ptr<NetOp>* stepnet) {
+  void Init(rnn::Argument* arg,
+            std::unique_ptr<framework::OperatorBase>* stepnet) {
     PADDLE_ENFORCE_NOT_NULL(stepnet, "stepnet should be set before.");
     arg_ = std::move(arg);
     stepnet_ = stepnet;

@@ -107,7 +109,7 @@ class RecurrentGradientAlgorithm {
  private:
   rnn::Argument* arg_;
   mutable size_t seq_len_;
-  std::shared_ptr<NetOp>* stepnet_;
+  std::unique_ptr<framework::OperatorBase>* stepnet_;
 };

 class RecurrentOp : public framework::OperatorBase {

@@ -133,15 +135,17 @@ class RecurrentOp : public framework::OperatorBase {
     alg_.Run(scope, dev_ctx);
   }

-  void set_stepnet(std::shared_ptr<NetOp> net) { stepnet_ = net; }
-  const NetOp& stepnet() const { return *stepnet_; }
+  void set_stepnet(std::unique_ptr<OperatorBase> net) {
+    stepnet_ = std::move(net);
+  }
+  const OperatorBase& stepnet() const { return *stepnet_; }

   static const rnn::ArgumentName kArgName;

  private:
   RecurrentAlgorithm alg_;
   rnn::Argument arg_;
-  std::shared_ptr<NetOp> stepnet_;
+  std::unique_ptr<OperatorBase> stepnet_;
 };

 class RecurrentGradientOp : public framework::OperatorBase {

@@ -171,12 +175,14 @@ class RecurrentGradientOp : public framework::OperatorBase {
   static const rnn::ArgumentName kArgName;

-  void set_stepnet(const std::shared_ptr<NetOp>& net) { stepnet_ = net; }
-  const NetOp& stepnet() const { return *stepnet_; }
+  void set_stepnet(std::unique_ptr<OperatorBase> net) {
+    stepnet_ = std::move(net);
+  }
+  const OperatorBase& stepnet() const { return *stepnet_; }

  private:
   RecurrentGradientAlgorithm alg_;
-  std::shared_ptr<NetOp> stepnet_;
+  std::unique_ptr<OperatorBase> stepnet_;
   rnn::Argument arg_;
 };
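
The two algorithm classes deliberately keep std::unique_ptr<framework::OperatorBase>* rather than owning the step net: the enclosing op owns the unique_ptr, and the algorithm watches that slot, so a later set_stepnet is seen without re-running Init. The idea in miniature, with simplified names:

#include <memory>

struct OperatorBase { virtual ~OperatorBase() = default; };
struct NetOp : OperatorBase {};

class RecurrentAlgorithmSketch {
 public:
  // Borrows the owner's slot; ownership stays with the enclosing op.
  void Init(std::unique_ptr<OperatorBase>* stepnet) { stepnet_ = stepnet; }
  const OperatorBase& stepnet() const { return **stepnet_; }

 private:
  std::unique_ptr<OperatorBase>* stepnet_ = nullptr;  // non-owning
};

class RecurrentOpSketch {
 public:
  RecurrentOpSketch() { alg_.Init(&stepnet_); }
  void set_stepnet(std::unique_ptr<OperatorBase> net) {
    stepnet_ = std::move(net);  // alg_ sees the new net through the slot
  }

 private:
  RecurrentAlgorithmSketch alg_;
  std::unique_ptr<OperatorBase> stepnet_;
};

int main() {
  RecurrentOpSketch op;
  op.set_stepnet(std::unique_ptr<OperatorBase>(new NetOp));
}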

python/paddle/v2/framework/tests/gradient_checker.py

@@ -165,7 +165,6 @@ class GradientChecker(unittest.TestCase):
         for no_grad in no_grad_set:
             if no_grad not in in_names:
                 raise ValueError("no_grad should be in in_names")
-
         backward_op = core.Operator.backward(forward_op, no_grad_set)

         bwd_outputs = backward_op.outputs()