BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 4932f752
Authored Aug 17, 2017 by Yu Yang

Merge branch 'develop' of github.com:baidu/Paddle into feature/remove_shared_ptr

Parents: 4b148d0a, ac61f784

Showing 7 changed files with 199 additions and 137 deletions:
paddle/capi/gradient_machine.cpp   +16  -0
paddle/capi/gradient_machine.h     +17  -1
paddle/framework/op_registry.cc    +44  -1
paddle/framework/op_registry.h     +5   -132
paddle/framework/operator.cc       +38  -0
paddle/framework/operator.h        +68  -0
python/paddle/v2/trainer.py        +11  -3
paddle/capi/gradient_machine.cpp

@@ -146,3 +146,19 @@ paddle_error paddle_gradient_machine_randomize_param(
  m->machine->randParameters();
  return kPD_NO_ERROR;
}

paddle_error paddle_gradient_machine_get_layer_output(
    paddle_gradient_machine machine, const char* layerName,
    paddle_arguments args) {
  auto m = cast(machine);
  auto out = paddle::capi::cast<paddle::capi::CArguments>(args);
  if (m == nullptr || layerName == nullptr || out == nullptr ||
      m->machine == nullptr) {
    return kPD_NULLPTR;
  }
  auto layerOutput = m->machine->getLayerOutput(layerName);
  out->args.push_back(layerOutput);
  return kPD_NO_ERROR;
}
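For orientation, a minimal calling sketch for the new accessor. This is an illustration, not part of the commit: the layer name is hypothetical, and it assumes a machine created through the existing C API that has already completed a forward pass, plus the paddle_arguments_create_none() helper from paddle/capi.

  // Sketch only: `machine` is assumed to exist and to have run inference.
  // The layer name "__fc_layer_0__" is illustrative.
  paddle_arguments out_args = paddle_arguments_create_none();
  paddle_error err = paddle_gradient_machine_get_layer_output(
      machine, "__fc_layer_0__", out_args);
  if (err != kPD_NO_ERROR) {
    // kPD_NULLPTR is returned when any handle above is null.
  }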
paddle/capi/gradient_machine.h

@@ -39,7 +39,11 @@ PD_API paddle_error paddle_gradient_machine_create_for_inference(
/**
 * @brief Create a gradient machine used for model inference, using config with
 *        parameters which is generated by `paddle merge_model`.
 *        Example:
 *          paddle merge_model \
 *              --model_dir="pass-00000" \
 *              --model_file="merged_model.paddle"
 * @param [out] machine the machine used for model inference
 * @param [in] mergedModel
 * @param [in] size
 * @return paddle_error
...

@@ -97,6 +101,18 @@ paddle_gradient_machine_randomize_param(paddle_gradient_machine machine);
PD_API paddle_error paddle_gradient_machine_destroy(
    paddle_gradient_machine machine);

/**
 * @brief Get the output of the layer named `layerName`.
 * @param [in] machine a gradient machine that has run an inference
 * @param [in] layerName name of the specified layer
 * @param [out] args output of the specified layer
 * @return paddle_error
 */
PD_API paddle_error paddle_gradient_machine_get_layer_output(
    paddle_gradient_machine machine, const char* layerName,
    paddle_arguments args);

#ifdef __cplusplus
}
#endif
...
paddle/framework/op_registry.cc

@@ -17,5 +17,48 @@ limitations under the License. */
#include <vector>

namespace paddle {
namespace framework {

std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const std::string& type,
                                                   const VarNameMap& inputs,
                                                   const VarNameMap& outputs,
                                                   AttributeMap attrs) {
  auto it = op_info_map().find(type);
  PADDLE_ENFORCE(it != op_info_map().end(),
                 "Operator '%s' has not been registered.", type);
  it->second.checker_->Check(attrs);
  auto op = it->second.creator_(type, inputs, outputs, attrs);
  return std::unique_ptr<OperatorBase>(op);
}

std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
  VarNameMap inputs = ConvertOpDescVarsToVarNameMap(op_desc.inputs());
  VarNameMap outputs = ConvertOpDescVarsToVarNameMap(op_desc.outputs());
  AttributeMap attrs;
  for (auto& attr : op_desc.attrs()) {
    attrs[attr.name()] = GetAttrValue(attr);
  }
  return CreateOp(op_desc.type(), inputs, outputs, attrs);
}

OperatorBase::VarNameMap OpRegistry::ConvertOpDescVarsToVarNameMap(
    const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars) {
  VarNameMap ret_val;
  for (auto& var : op_desc_vars) {
    auto& var_names = ret_val[var.parameter()];
    auto& var_names_in_proto = var.arguments();
    var_names.reserve(static_cast<size_t>(var_names_in_proto.size()));
    std::copy(var_names_in_proto.begin(), var_names_in_proto.end(),
              std::back_inserter(var_names));
  }
  return ret_val;
}

std::unique_ptr<OperatorBase> OpRegistry::CreateGradOp(const OperatorBase& op) {
  PADDLE_ENFORCE(!op.IsNetOp(), "Use framework::Backward to get backward ops");
  return std::unique_ptr<OperatorBase>(BuildGradOp(&op));
}

}  // namespace framework
}  // namespace paddle
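To make the new ownership contract concrete, here is a small calling sketch. It is not part of the commit: the op type "add_two" and the variable names are illustrative and assume such an operator was registered beforehand.

  // Sketch only: VarNameMap maps a parameter name to its variable names.
  auto op = paddle::framework::OpRegistry::CreateOp(
      "add_two",
      {{"X", {"x"}}, {"Y", {"y"}}},  // inputs
      {{"Out", {"out"}}},            // outputs
      {});                           // attributes, validated by checker_
  // The caller receives a std::unique_ptr<OperatorBase>, i.e. exclusive
  // ownership, which is what the feature/remove_shared_ptr branch moves toward.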
paddle/framework/op_registry.h

@@ -29,103 +29,6 @@ limitations under the License. */
namespace paddle {
namespace framework {

(Removed here: the inline definitions of OpProtoAndCheckerMaker and NOPMaker,
which move to paddle/framework/operator.h; see that file below.)

class OpRegistry {
  using VarNameMap = OperatorBase::VarNameMap;
  using OpCreator = std::function<OperatorBase* (
...

@@ -177,44 +80,14 @@ class OpRegistry {
(Removed here: the inline bodies of CreateOp, ConvertOpDescVarsToVarNameMap,
and CreateGradOp, which move to paddle/framework/op_registry.cc, shown above.
Only the declarations remain:)

  static std::unique_ptr<OperatorBase> CreateOp(const std::string& type,
                                                const VarNameMap& inputs,
                                                const VarNameMap& outputs,
                                                AttributeMap attrs);

  static std::unique_ptr<OperatorBase> CreateOp(const OpDesc& op_desc);

  static VarNameMap ConvertOpDescVarsToVarNameMap(
      const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars);

  static std::unique_ptr<OperatorBase> CreateGradOp(const OperatorBase& op);

  static std::unordered_map<std::string, const OpInfo>& op_info_map() {
    static std::unordered_map<std::string, const OpInfo> op_info_map_;
...
paddle/framework/operator.cc

@@ -164,5 +164,43 @@ std::vector<std::string> OperatorBase::OutputVars(bool has_intermediate) const {
  return ret_val;
}

void OpProtoAndCheckerMaker::Validate() {
  validated_ = true;
  CheckNoDuplicatedInOutAttrs();
}

OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddInput(
    const std::string& name, const std::string& comment) {
  auto* input = proto_->add_inputs();
  input->set_name(name);
  input->set_comment(comment);
  return OpProtoAndCheckerMaker::VariableBuilder{input};
}

OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddOutput(
    const std::string& name, const std::string& comment) {
  auto* output = proto_->add_outputs();
  output->set_name(name);
  output->set_comment(comment);
  return OpProtoAndCheckerMaker::VariableBuilder{output};
}

void OpProtoAndCheckerMaker::CheckNoDuplicatedInOutAttrs() {
  std::unordered_set<std::string> names;
  auto checker = [&](const std::string& name) {
    PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
    names.insert(name);
  };
  for (auto& attr : proto_->attrs()) {
    checker(attr.name());
  }
  for (auto& input : proto_->inputs()) {
    checker(input.name());
  }
  for (auto& output : proto_->outputs()) {
    checker(output.name());
  }
}

}  // namespace framework
}  // namespace paddle
paddle/framework/operator.h

@@ -158,6 +158,74 @@ class NOP : public OperatorBase {
  }
};

// this class not only make proto but also init attribute checkers.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  void Validate();

 protected:
  struct VariableBuilder {
    OpProto::Var* var_;

    VariableBuilder& AsDuplicable() {
      var_->set_duplicable(true);
      return *this;
    }

    VariableBuilder& AsIntermediate() {
      var_->set_intermediate(true);
      return *this;
    }

    // TODO(FengJiayi, yuyang18): `AsNoGradient` is a very bad name, because it
    // means that input/output is not needed when calculate gradient. It does
    // not mean no gradient when backward. It should be changed soon.
    VariableBuilder& AsNoGradient() {
      var_->set_no_gradient(true);
      return *this;
    }
  };

  VariableBuilder AddInput(const std::string& name, const std::string& comment);

  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment);

  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
                               const std::string& comment,
                               bool generated = false) {
    auto* attr = proto_->add_attrs();
    attr->set_name(name);
    attr->set_comment(comment);
    attr->set_generated(generated);
    attr->set_type(AttrTypeID<T>());
    return op_checker_->AddAttrChecker<T>(name);
  }

  void AddComment(const std::string& comment) { proto_->set_comment(comment); }

 private:
  void CheckNoDuplicatedInOutAttrs();

  OpProto* proto_;
  OpAttrChecker* op_checker_;
  bool validated_{false};
};

class NOPMaker : public OpProtoAndCheckerMaker {
 public:
  NOPMaker(framework::OpProto* proto, framework::OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {}
};

class InferShapeContext {
 public:
  InferShapeContext(const OperatorBase& op, const Scope& scope)
...
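As a sketch of how this builder API is used in practice, here is a hypothetical maker. The op name, variables, and attribute are illustrative, and SetDefault is assumed from the existing TypedAttrChecker chain; none of this is part of the commit.

  // Hypothetical maker, shown only to illustrate the protected builder API.
  class ScaleOpMaker : public OpProtoAndCheckerMaker {
   public:
    ScaleOpMaker(OpProto* proto, OpAttrChecker* op_checker)
        : OpProtoAndCheckerMaker(proto, op_checker) {
      AddInput("X", "The input tensor.");
      AddOutput("Out", "The scaled output.");
      AddAttr<float>("scale", "The scaling factor.").SetDefault(1.0f);
      AddComment("Computes Out = scale * X.");
    }
  };
  // The registration machinery calls Validate() after construction; the
  // destructor enforces (via validated_) that this actually happened.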
python/paddle/v2/trainer.py

@@ -27,16 +27,24 @@ class SGD(object):
    SGD Trainer combines data reader, network topology and update_equation
    together to train/test a neural network.

    :param cost: Target cost that neural network should be optimized.
    :type cost: paddle.v2.config_base.Layer
    :param parameters: The parameters dictionary.
    :type parameters: paddle.v2.parameters.Parameters
    :param update_equation: The optimizer object.
    :type update_equation: paddle.v2.optimizer.Optimizer
    :param extra_layers: Some layers in the neural network graph are not
                         in the path of the cost layer.
    :type extra_layers: paddle.v2.config_base.Layer
    :param is_local: Whether to train locally
    :type is_local: bool
    :param pserver_spec: comma-separated pserver locations,
                         e.g. 127.10.0.10:3000,127.10.0.11:3000;
                         only used for fault-tolerant mode cluster training.
    :type pserver_spec: string
    :param use_etcd: Whether to use the etcd pserver.
    :type use_etcd: bool
    """

    def __init__(self,
...