PaddlePaddle / Paddle

Commit a5241e1d (unverified)

refine program (#17726)

Author: Yan Chunwei, Jun 10, 2019
Committer: GitHub, Jun 10, 2019
Parent: 01e1cdac
Showing 13 changed files with 95 additions and 83 deletions (+95 −83)
paddle/fluid/framework/op_desc.cc                    +2   -1
paddle/fluid/lite/api/light_api.h                    +4   -4
paddle/fluid/lite/core/CMakeLists.txt                +1   -1
paddle/fluid/lite/core/mir/generate_program_pass.h   +1   -1
paddle/fluid/lite/core/mir/ssa_graph.cc              +3   -3
paddle/fluid/lite/core/mir/ssa_graph.h               +1   -1
paddle/fluid/lite/core/op_lite.h                     +1   -1
paddle/fluid/lite/core/optimizer.h                   +1   -1
paddle/fluid/lite/core/program.cc                    +40  -0
paddle/fluid/lite/core/program.h                     +29  -60
paddle/fluid/lite/core/program_fake_utils.h          +9   -9
paddle/fluid/lite/model_parser/compatible_pb.cc      +2   -1
paddle/fluid/lite/utils/string.h                     +1   -0
paddle/fluid/framework/op_desc.cc

@@ -13,12 +13,13 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #include "paddle/fluid/framework/op_desc.h"
+#include <glog/logging.h>
 #include <algorithm>
 #include <functional>
 #include <mutex>  // NOLINT
 #include <string>
 #include <unordered_map>
-#include "glog/logging.h"
+#include <utility>
 #include "paddle/fluid/framework/block_desc.h"
 #include "paddle/fluid/framework/op_proto_maker.h"
 #include "paddle/fluid/framework/operator.h"
paddle/fluid/lite/api/light_api.h

@@ -64,7 +64,7 @@ class LightPredictor {
  private:
   void BuildRuntimeProgram(const framework::proto::ProgramDesc& prog) {
-    std::vector<Instruct> insts;
+    std::vector<Instruction> insts;
     // 1. Create op first
     Program program(prog, scope_, {});
@@ -72,7 +72,7 @@ class LightPredictor {
     // Create the kernels of the target places, and filter out the specific
     // kernel with the target alias.
-    for (auto& op : program.ops) {
+    for (auto& op : program.ops_) {
       lite::pb::OpDesc desc(op->op_info()->desc());
       auto kernel_type = desc.GetAttr(kKernelTypeAttr).get<std::string>();
       std::string op_type, alias;
@@ -89,8 +89,8 @@ class LightPredictor {
       insts.emplace_back(op, std::move(*it));
     }
     program_.reset(new RuntimeProgram(std::move(insts)));
-    CHECK(program.exec_scope);
-    program_->set_exec_scope(program.exec_scope);
+    CHECK(program.exec_scope_);
+    program_->set_exec_scope(program.exec_scope_);
   }
 
  private:
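Throughout this commit the `Instruct` type used by `BuildRuntimeProgram` is renamed to `Instruction`. Conceptually, an `Instruction` pairs one op with the kernel selected for it, and a `RuntimeProgram` runs those pairs in order. The following standalone sketch illustrates that pattern only; `FakeOp` and `FakeKernel` are hypothetical stand-ins, not the Paddle Lite types.

// Illustrative sketch of the op/kernel/instruction pattern (stand-in types).
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct FakeOp {            // hypothetical stand-in for OpLite
  std::string type;
};

struct FakeKernel {        // hypothetical stand-in for KernelBase
  std::string name;
  void Launch() { std::cout << "launch " << name << "\n"; }
};

// An "instruction" couples one op with the kernel picked for it.
struct Instruction {
  Instruction(std::shared_ptr<FakeOp> op, std::unique_ptr<FakeKernel> kernel)
      : op_(std::move(op)), kernel_(std::move(kernel)) {}
  void Run() { kernel_->Launch(); }

 private:
  std::shared_ptr<FakeOp> op_;
  std::unique_ptr<FakeKernel> kernel_;
};

// A runtime program is just the ordered list of instructions to execute.
class RuntimeProgram {
 public:
  explicit RuntimeProgram(std::vector<Instruction>&& insts)
      : instructions_(std::move(insts)) {}
  void Run() {
    for (auto& inst : instructions_) inst.Run();
  }

 private:
  std::vector<Instruction> instructions_;
};

int main() {
  std::vector<Instruction> insts;
  insts.emplace_back(std::make_shared<FakeOp>(FakeOp{"fc"}),
                     std::unique_ptr<FakeKernel>(new FakeKernel{"fc/x86"}));
  RuntimeProgram program(std::move(insts));
  program.Run();  // prints: launch fc/x86
}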
paddle/fluid/lite/core/CMakeLists.txt

@@ -30,7 +30,7 @@ cc_library(op_lite SRCS op_lite.cc DEPS scope_lite op_registry_lite target_wrapp
 cc_library(types_lite SRCS types.cc)
 cc_library(type_system SRCS type_system.cc DEPS ${tensor_lite} target_wrapper_lite)
-cc_library(program_lite SRCS program.cc DEPS op_lite kernel_lite)
+cc_library(program_lite SRCS program.cc DEPS op_lite kernel_lite compatible_pb_lite)
 cc_library(optimizer_lite SRCS optimizer.cc DEPS mir_pass_manager model_parser_lite program_lite)
 add_subdirectory(mir)
paddle/fluid/lite/core/mir/generate_program_pass.h

@@ -41,7 +41,7 @@ class GenerateProgramPass : public ProgramPass {
   }
 
  private:
-  std::vector<Instruct> insts_;
+  std::vector<Instruction> insts_;
 };
 
 }  // namespace mir
paddle/fluid/lite/core/mir/ssa_graph.cc

@@ -94,7 +94,7 @@ std::vector<mir::Node *> SSAGraph::StmtTopologicalOrder() {
 }
 
 void SSAGraph::GraphCreateTmpVarNodes(const Program &program) {
-  for (const auto &name : program.tmp_vars) {
+  for (const auto &name : program.tmp_vars()) {
     CHECK(!arguments_.count(name)) << "duplicate creating temp variable: "
                                    << name;
     VLOG(5) << "create arg node " << name;
@@ -107,7 +107,7 @@ void SSAGraph::GraphCreateTmpVarNodes(const Program &program) {
 void SSAGraph::GraphCreateWeightVarNodes(const Program &program) {
   // create weight nodes.
-  for (const auto &name : program.weights) {
+  for (const auto &name : program.weights()) {
     CHECK(!arguments_.count(name)) << "duplicate creating weight variable: "
                                    << name;
     VLOG(5) << "create arg node " << name;
@@ -140,7 +140,7 @@ void SSAGraph::Build(const Program &program,
   GraphCreateWeightVarNodes(program);
   CHECK(CheckNodesRoleSet());
 
-  for (auto &op : program.ops) {
+  for (auto &op : program.ops()) {
     auto *op_node = GraphCreateInstructNode(program, op, valid_places);
     for (const std::string &name : op->op_info()->input_names()) {
       auto *arg = Argument(name);
paddle/fluid/lite/core/mir/ssa_graph.h

@@ -77,7 +77,7 @@ class SSAGraph : GraphBase {
   bool CheckLinksRoleSet();
 
   void MarkArgumentWeights(const Program &program) {
-    for (const auto &name : program.weights) {
+    for (const auto &name : program.weights()) {
       arguments_[name]->AsArg().is_weight = true;
     }
   }
paddle/fluid/lite/core/op_lite.h

@@ -147,7 +147,7 @@ class OpLite : public Registry {
 class OpInfo : public cpp::OpDesc {
  public:
   OpInfo(const OpInfo &) = default;
-  OpInfo(const cpp::OpDesc &other) : cpp::OpDesc(other) {}
+  explicit OpInfo(const cpp::OpDesc &other) : cpp::OpDesc(other) {}
 
   // Collect all the input variable's name.
   std::vector<std::string> input_names() const {
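The only change here marks the converting constructor `OpInfo(const cpp::OpDesc&)` as `explicit`, so a `cpp::OpDesc` no longer converts to an `OpInfo` silently. A minimal sketch of what that buys, using a toy `Desc`/`Info` pair rather than the real classes:

#include <string>

struct Desc {
  std::string type;
};

struct Info {
  explicit Info(const Desc& d) : type(d.type) {}  // explicit: no implicit conversion
  std::string type;
};

void Consume(const Info& info) { (void)info; }

int main() {
  Desc d{"fc"};
  // Consume(d);     // would compile without 'explicit'; now a compile error
  Consume(Info(d));  // the conversion must be spelled out at the call site
}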
paddle/fluid/lite/core/optimizer.h

@@ -64,7 +64,7 @@ class Optimizer {
       RunPasses(passes);
     }
 #endif
-    exec_scope_ = program.exec_scope;
+    exec_scope_ = program.exec_scope();
   }
 
   void KernelPickPreferPlace(const Place &place) {
paddle/fluid/lite/core/program.cc

@@ -62,5 +62,45 @@ void RuntimeProgram::SaveParams(const std::string &dir,
   }
 }
 
+void Program::Build(const framework::proto::ProgramDesc &program) {
+  CHECK(ops_.empty()) << "Executor duplicate Build found";
+  // Create operators.
+  for (const auto &proto_op_desc : program.blocks(0).ops()) {
+    lite::OpDesc op_desc_dummy(proto_op_desc);
+    cpp::OpDesc op_desc;
+    TransformOpDescPbToCpp(op_desc_dummy, &op_desc);
+    auto op_type = op_desc.Type();
+    // if (op_type == "feed" || op_type == "fetch") continue;
+    VLOG(4) << "create Op [" << op_type << "]";
+    LOG(INFO) << "create Op [" << op_type << "]";
+    auto op = LiteOpRegistry::Global().Create(op_type);
+    CHECK(op) << "no Op found for " << op_type;
+    ops_.emplace_back(std::move(op));
+    ops_.back()->Attach(op_desc, exec_scope_);
+  }
+}
+
+void Program::PrepareWorkspace(const framework::proto::ProgramDesc &program) {
+  CHECK(!exec_scope_) << "Duplicate PrepareWorkspace found";
+  exec_scope_ = &scope_->NewScope();
+  // Create Feed and Fetch var.
+  scope_->Var("feed")->GetMutable<std::vector<lite::Tensor>>();
+  scope_->Var("fetch")->GetMutable<std::vector<lite::Tensor>>();
+
+  tmp_vars_.push_back("feed");
+  tmp_vars_.push_back("fetch");
+  CHECK(!program.blocks().empty());
+  for (auto proto_var_desc : program.blocks(0).vars()) {
+    lite::VarDesc var_desc(proto_var_desc);
+    if (!var_desc.Persistable()) {
+      tmp_vars_.push_back(var_desc.Name());
+      exec_scope_->Var(var_desc.Name());
+    } else {
+      if (var_desc.Name() == "feed" || var_desc.Name() == "fetch") continue;
+      weights_.push_back(var_desc.Name());
+    }
+  }
+}
+
 }  // namespace lite
 }  // namespace paddle
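These two member functions previously lived inline in program.h (next file); they now sit in program.cc, and Build goes through TransformOpDescPbToCpp so ops are attached from a cpp::OpDesc. PrepareWorkspace then splits the block's variables into temporaries (non-persistable, created in the execution scope) and weights (persistable, except the special feed/fetch vars). A standalone sketch of that classification rule, with a hypothetical VarInfo stand-in instead of the real lite::VarDesc:

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for lite::VarDesc: just a name plus a persistable flag.
struct VarInfo {
  std::string name;
  bool persistable;
};

int main() {
  std::vector<std::string> tmp_vars{"feed", "fetch"};  // always created up front
  std::vector<std::string> weights;

  std::vector<VarInfo> block_vars = {
      {"x", false}, {"fc_w", true}, {"fc_b", true}, {"feed", true}, {"out", false}};

  for (const auto& var : block_vars) {
    if (!var.persistable) {
      tmp_vars.push_back(var.name);          // lives in the execution scope
    } else if (var.name != "feed" && var.name != "fetch") {
      weights.push_back(var.name);           // persistable params become weights
    }
  }

  std::cout << "tmp: " << tmp_vars.size() << ", weights: " << weights.size() << "\n";
  // expected: tmp: 4 (feed, fetch, x, out), weights: 2 (fc_w, fc_b)
}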
paddle/fluid/lite/core/program.h

@@ -37,78 +37,47 @@ static const char kKernelTypeAttr[] = "__@kernel_type_attr@__";
 // - main block, which is a list of OpLite
 // - scope: which contains all the weights
 struct Program {
-  std::list<std::string> tmp_vars;
-  std::list<std::string> weights;
-  std::list<std::shared_ptr<OpLite>> ops;
-  // the scope to run the kernels, NOTE this is the execution scope.
-  std::shared_ptr<lite::Scope> scope;
-  std::vector<Place> valid_places;
-  // Runtime scope.
-  lite::Scope* exec_scope{};
-  const framework::proto::ProgramDesc desc;
-
-  explicit Program(const std::shared_ptr<Scope>& root) { scope = root; }
-
+ public:
+  explicit Program(const std::shared_ptr<Scope>& root) { scope_ = root; }
   Program(const framework::proto::ProgramDesc& desc,
           const std::shared_ptr<Scope>& root,
           const std::vector<Place>& valid_places)
-      : scope(root), valid_places(valid_places), desc(desc) {
-    CHECK(scope) << "scope should be init first";
+      : scope_(root), valid_places_(valid_places), desc_(desc) {
+    CHECK(scope_) << "scope should be init first";
     PrepareWorkspace(desc);
     Build(desc);
   }
 
   std::unique_ptr<Program> Clone() const {
-    std::unique_ptr<Program> res(new Program(desc, scope, valid_places));
+    std::unique_ptr<Program> res(new Program(desc_, scope_, valid_places_));
     return res;
   }
 
+  const std::list<std::string>& weights() const { return weights_; }
+  const std::list<std::string>& tmp_vars() const { return tmp_vars_; }
+  const std::list<std::shared_ptr<OpLite>>& ops() const { return ops_; }
+  lite::Scope* exec_scope() { return exec_scope_; }
+
  private:
   // Build from a program and scope.
-  void Build(const framework::proto::ProgramDesc& program) {
-    CHECK(ops.empty()) << "Executor duplicate Build found";
-    // Create operators.
-    for (const auto& proto_op_desc : program.blocks(0).ops()) {
-      pb::OpDesc op_desc(proto_op_desc);
-      auto op_type = op_desc.Type();
-      // if (op_type == "feed" || op_type == "fetch") continue;
-      VLOG(4) << "create Op [" << op_type << "]";
-      LOG(INFO) << "create Op [" << op_type << "]";
-      auto op = LiteOpRegistry::Global().Create(op_type);
-      CHECK(op) << "no Op found for " << op_type;
-      ops.emplace_back(std::move(op));
-      cpp::OpDesc cpp_op_desc;
-      TransformOpDescPbToCpp(op_desc, &cpp_op_desc);
-      ops.back()->Attach(cpp_op_desc, exec_scope);
-    }
-  }
-
+  void Build(const framework::proto::ProgramDesc& program);
   // Create temporary variables.
-  void PrepareWorkspace(const framework::proto::ProgramDesc& program) {
-    CHECK(!exec_scope) << "Duplicate PrepareWorkspace found";
-    exec_scope = &scope->NewScope();
-    // Create Feed and Fetch var.
-    scope->Var("feed")->GetMutable<std::vector<lite::Tensor>>();
-    scope->Var("fetch")->GetMutable<std::vector<lite::Tensor>>();
-
-    tmp_vars.push_back("feed");
-    tmp_vars.push_back("fetch");
-    CHECK(!program.blocks().empty());
-    for (auto proto_var_desc : program.blocks(0).vars()) {
-      lite::VarDesc var_desc(proto_var_desc);
-      if (!var_desc.Persistable()) {
-        tmp_vars.push_back(var_desc.Name());
-        exec_scope->Var(var_desc.Name());
-      } else {
-        if (var_desc.Name() == "feed" || var_desc.Name() == "fetch") continue;
-        weights.push_back(var_desc.Name());
-      }
-    }
-  }
+  void PrepareWorkspace(const framework::proto::ProgramDesc& program);
+
+ private:
+  std::list<std::string> tmp_vars_;
+  std::list<std::string> weights_;
+  std::list<std::shared_ptr<OpLite>> ops_;
+  // the scope to run the kernels, NOTE this is the execution scope.
+  std::shared_ptr<lite::Scope> scope_;
+  std::vector<Place> valid_places_;
+  // Runtime scope.
+  lite::Scope* exec_scope_{};
+  const framework::proto::ProgramDesc desc_;
 };
 
-struct Instruct {
-  Instruct(const std::shared_ptr<OpLite>& op,
+struct Instruction {
+  Instruction(const std::shared_ptr<OpLite>& op,
            std::unique_ptr<KernelBase>&& kernel)
       : op_(op), kernel_(std::move(kernel)) {
 #ifdef LITE_WITH_PROFILE
@@ -132,7 +101,7 @@ struct Instruct {
     kernel_->Launch();
   }
 
-  friend std::ostream& operator<<(std::ostream& os, const Instruct& other) {
+  friend std::ostream& operator<<(std::ostream& os, const Instruction& other) {
     os << other.kernel_->summary() << "\t(" << other.kernel_->doc() << ")";
     return os;
   }
@@ -156,7 +125,7 @@ struct Instruct {
  */
 class RuntimeProgram {
  public:
-  explicit RuntimeProgram(std::vector<Instruct>&& insts)
+  explicit RuntimeProgram(std::vector<Instruction>&& insts)
       : instructions_(std::move(insts)) {
     if (instructions_.empty()) {
       LOG(FATAL) << "no instructions";
@@ -186,7 +155,7 @@ class RuntimeProgram {
  private:
   RuntimeProgram(const RuntimeProgram&) = delete;
 
-  std::vector<Instruct> instructions_;
+  std::vector<Instruction> instructions_;
   lite::Scope* exec_scope_{};
 };
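This is the heart of the commit: Program's public fields (tmp_vars, weights, ops, scope, exec_scope, desc) become private members with a trailing underscore, exposed through const accessors, while the Build/PrepareWorkspace bodies move out to program.cc. Call sites such as SSAGraph switch from field access to accessor calls. A minimal sketch of that before/after, using a trimmed stand-in class (the AddWeight/AddTmpVar mutators are invented for the sketch; the real members are filled in by PrepareWorkspace):

#include <iostream>
#include <list>
#include <string>

// After the refactor: data is private, read access goes through const accessors.
class ProgramLike {
 public:
  void AddWeight(const std::string& name) { weights_.push_back(name); }
  void AddTmpVar(const std::string& name) { tmp_vars_.push_back(name); }

  // Const accessors replace the old public fields `weights` / `tmp_vars`.
  const std::list<std::string>& weights() const { return weights_; }
  const std::list<std::string>& tmp_vars() const { return tmp_vars_; }

 private:
  std::list<std::string> weights_;
  std::list<std::string> tmp_vars_;
};

int main() {
  ProgramLike program;
  program.AddWeight("fc_w");
  program.AddTmpVar("x");

  // Old call sites iterated `program.weights`; they now call `program.weights()`.
  for (const auto& name : program.weights()) {
    std::cout << "weight: " << name << "\n";
  }
}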
paddle/fluid/lite/core/program_fake_utils.h

@@ -33,9 +33,9 @@ Program FakeProgram() {
     std::string w1 = "w" + std::to_string(id);
     std::string b1 = "b" + std::to_string(id);
     std::string out1 = "out" + std::to_string(id);
-    auto w1v = program.scope->Var(w1)->GetMutable<lite::Tensor>();
-    auto b1v = program.scope->Var(b1)->GetMutable<lite::Tensor>();
-    auto out1v = program.scope->Var(out1)->GetMutable<lite::Tensor>();
+    auto w1v = program.scope_->Var(w1)->GetMutable<lite::Tensor>();
+    auto b1v = program.scope_->Var(b1)->GetMutable<lite::Tensor>();
+    auto out1v = program.scope_->Var(out1)->GetMutable<lite::Tensor>();
 
     lite::OpDesc desc;
     desc.SetInput("Input", {x});
@@ -46,12 +46,12 @@ Program FakeProgram() {
     desc.SetAttr("in_num_col_dims", 1);
 
     // add to input
-    program.tmp_vars.push_back(w1);
-    program.tmp_vars.push_back(b1);
+    program.tmp_vars_.push_back(w1);
+    program.tmp_vars_.push_back(b1);
 
     auto fc_op = LiteOpRegistry::Global().Create("fc");
-    fc_op->Attach(desc, program.scope.get());
-    program.ops.emplace_back(std::move(fc_op));
+    fc_op->Attach(desc, program.scope_.get());
+    program.ops_.emplace_back(std::move(fc_op));
 
     w1v->Resize(DDimHvy(std::vector<int64_t>({100, 100})));
     b1v->Resize(DDimHvy(std::vector<int64_t>({100, 1})));
@@ -64,8 +64,8 @@ Program FakeProgram() {
   // out1, w2, b2 -fc-> out2
 
   std::string x = "x";
-  program.tmp_vars.push_back(x);
-  auto* xv = program.scope->Var(x)->GetMutable<lite::Tensor>();
+  program.tmp_vars_.push_back(x);
+  auto* xv = program.scope_->Var(x)->GetMutable<lite::Tensor>();
   xv->Resize(DDimHvy(std::vector<int64_t>({100, 100})));
 
   for (int i = 0; i < 3; i++) {
paddle/fluid/lite/model_parser/compatible_pb.cc

@@ -13,7 +13,8 @@
 // limitations under the License.
 
 #include "paddle/fluid/lite/model_parser/compatible_pb.h"
-#include "compatible_pb.h"
+#include <string>
+#include <vector>
 
 namespace paddle {
 namespace lite {
paddle/fluid/lite/utils/string.h

@@ -61,6 +61,7 @@ static std::string Join(const std::vector<std::string>& vec,
   if (!vec.empty()) {
     ss << vec.back();
   }
+
   return ss.str();
 }
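For context, the visible tail of Join streams the remaining elements and appends vec.back() last. A self-contained sketch of a Join-style helper written to the same shape (only the tail shown above is verbatim from the file; the delimiter parameter and loop are assumptions for illustration):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Sketch: elements separated by `delim`, last element appended without a trailing delimiter.
static std::string Join(const std::vector<std::string>& vec,
                        const std::string& delim) {
  std::stringstream ss;
  for (size_t i = 0; i + 1 < vec.size(); ++i) {
    ss << vec[i] << delim;
  }
  if (!vec.empty()) {
    ss << vec.back();
  }

  return ss.str();
}

int main() {
  std::cout << Join({"a", "b", "c"}, ", ") << "\n";  // prints: a, b, c
}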