Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
Paddle-Lite
提交
1ad8f821
P
Paddle-Lite
项目概览
PaddlePaddle
/
Paddle-Lite
通知
338
Star
4
Fork
1
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
271
列表
看板
标记
里程碑
合并请求
78
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle-Lite
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
271
Issue
271
列表
看板
标记
里程碑
合并请求
78
合并请求
78
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
1ad8f821
编写于
5月 20, 2018
作者:
朔-望
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
modify to 2 spaces indent & format code & rm build folder
上级
e35ef6fe
变更
102
显示空白变更内容
内联
并排
Showing
102 changed files
with
13543 additions
and
14086 deletions
+13543
-14086
.clang-format
.clang-format
+0
-1
cmake-build-release/compile_commands.json
cmake-build-release/compile_commands.json
+0
-312
src/common/log.h
src/common/log.h
+86
-88
src/common/types.h
src/common/types.h
+24
-24
src/common/variant.h
src/common/variant.h
+50
-50
src/framework/attribute.h
src/framework/attribute.h
+85
-85
src/framework/block_desc.cpp
src/framework/block_desc.cpp
+16
-16
src/framework/block_desc.h
src/framework/block_desc.h
+23
-24
src/framework/data_layout.h
src/framework/data_layout.h
+29
-29
src/framework/data_transform.cpp
src/framework/data_transform.cpp
+52
-52
src/framework/ddim.cc
src/framework/ddim.cc
+174
-176
src/framework/ddim.h
src/framework/ddim.h
+57
-57
src/framework/dim.h
src/framework/dim.h
+139
-141
src/framework/executor.cpp
src/framework/executor.cpp
+48
-48
src/framework/executor.h
src/framework/executor.h
+12
-12
src/framework/framework.pb.cpp
src/framework/framework.pb.cpp
+5392
-5493
src/framework/framework.pb.h
src/framework/framework.pb.h
+3436
-3483
src/framework/lod_tensor.cc
src/framework/lod_tensor.cc
+230
-232
src/framework/lod_tensor.h
src/framework/lod_tensor.h
+58
-58
src/framework/op_desc.cpp
src/framework/op_desc.cpp
+30
-30
src/framework/op_desc.h
src/framework/op_desc.h
+14
-14
src/framework/op_info.h
src/framework/op_info.h
+50
-50
src/framework/op_kernel_type.h
src/framework/op_kernel_type.h
+22
-23
src/framework/operator.cpp
src/framework/operator.cpp
+1
-1
src/framework/operator.h
src/framework/operator.h
+32
-32
src/framework/paddle_mobile_object.h
src/framework/paddle_mobile_object.h
+7
-7
src/framework/program-optimize/node.cpp
src/framework/program-optimize/node.cpp
+42
-42
src/framework/program-optimize/node.h
src/framework/program-optimize/node.h
+15
-15
src/framework/program-optimize/program_optimize.cpp
src/framework/program-optimize/program_optimize.cpp
+37
-38
src/framework/program-optimize/program_optimize.h
src/framework/program-optimize/program_optimize.h
+9
-9
src/framework/program.h
src/framework/program.h
+5
-5
src/framework/program_desc.cpp
src/framework/program_desc.cpp
+5
-5
src/framework/program_desc.h
src/framework/program_desc.h
+7
-7
src/framework/scope.cc
src/framework/scope.cc
+58
-58
src/framework/scope.h
src/framework/scope.h
+34
-34
src/framework/selected_rows.h
src/framework/selected_rows.h
+48
-48
src/framework/tensor.h
src/framework/tensor.h
+263
-264
src/framework/tensor_util.cc
src/framework/tensor_util.cc
+131
-132
src/framework/tensor_util.h
src/framework/tensor_util.h
+10
-10
src/framework/var_desc.h
src/framework/var_desc.h
+45
-45
src/framework/var_type.h
src/framework/var_type.h
+8
-8
src/framework/variable.h
src/framework/variable.h
+51
-51
src/io.cpp
src/io.cpp
+329
-333
src/io.h
src/io.h
+4
-4
src/memory/t_malloc.cc
src/memory/t_malloc.cc
+13
-13
src/memory/t_malloc.h
src/memory/t_malloc.h
+7
-7
src/operators/batchnorm_op.cpp
src/operators/batchnorm_op.cpp
+2
-2
src/operators/batchnorm_op.h
src/operators/batchnorm_op.h
+19
-19
src/operators/concat_op.cpp
src/operators/concat_op.cpp
+27
-27
src/operators/concat_op.h
src/operators/concat_op.h
+15
-16
src/operators/conv_op.cpp
src/operators/conv_op.cpp
+24
-24
src/operators/conv_op.h
src/operators/conv_op.h
+19
-19
src/operators/elementwise_add_op.cpp
src/operators/elementwise_add_op.cpp
+2
-2
src/operators/elementwise_add_op.h
src/operators/elementwise_add_op.h
+16
-16
src/operators/kernel/arm/batchnorm_kernel.cpp
src/operators/kernel/arm/batchnorm_kernel.cpp
+60
-61
src/operators/kernel/arm/concat_kernel.cpp
src/operators/kernel/arm/concat_kernel.cpp
+70
-71
src/operators/kernel/arm/conv_kernel.cpp
src/operators/kernel/arm/conv_kernel.cpp
+114
-116
src/operators/kernel/arm/elementwise_add_kernel.cpp
src/operators/kernel/arm/elementwise_add_kernel.cpp
+8
-8
src/operators/kernel/arm/lrn_kernel.cpp
src/operators/kernel/arm/lrn_kernel.cpp
+15
-15
src/operators/kernel/arm/mul_kernel.cpp
src/operators/kernel/arm/mul_kernel.cpp
+21
-21
src/operators/kernel/arm/pool_kernel.cpp
src/operators/kernel/arm/pool_kernel.cpp
+37
-37
src/operators/kernel/batchnorm_kernel.h
src/operators/kernel/batchnorm_kernel.h
+2
-2
src/operators/kernel/concat_kernel.h
src/operators/kernel/concat_kernel.h
+2
-2
src/operators/kernel/conv_kernel.h
src/operators/kernel/conv_kernel.h
+2
-2
src/operators/kernel/elementwise_add_kernel.h
src/operators/kernel/elementwise_add_kernel.h
+2
-2
src/operators/kernel/lrn_kernel.h
src/operators/kernel/lrn_kernel.h
+32
-35
src/operators/kernel/mul_kernel.h
src/operators/kernel/mul_kernel.h
+2
-2
src/operators/kernel/pool_kernel.h
src/operators/kernel/pool_kernel.h
+2
-2
src/operators/lrn_op.cpp
src/operators/lrn_op.cpp
+2
-2
src/operators/lrn_op.h
src/operators/lrn_op.h
+18
-18
src/operators/math/elementwise_op_function.h
src/operators/math/elementwise_op_function.h
+137
-138
src/operators/math/im2col.cc
src/operators/math/im2col.cc
+215
-232
src/operators/math/im2col.h
src/operators/math/im2col.h
+9
-10
src/operators/math/math_function.cc
src/operators/math/math_function.cc
+62
-62
src/operators/math/pool3x3.h
src/operators/math/pool3x3.h
+2
-2
src/operators/math/pool_2x2.h
src/operators/math/pool_2x2.h
+2
-2
src/operators/math/pooling.cpp
src/operators/math/pooling.cpp
+47
-49
src/operators/math/pooling.h
src/operators/math/pooling.h
+13
-14
src/operators/math/transform.h
src/operators/math/transform.h
+12
-12
src/operators/math/vol2col.cc
src/operators/math/vol2col.cc
+149
-162
src/operators/math/vol2col.h
src/operators/math/vol2col.h
+8
-8
src/operators/mul_op.cpp
src/operators/mul_op.cpp
+21
-21
src/operators/mul_op.h
src/operators/mul_op.h
+18
-18
src/operators/op_param.cpp
src/operators/op_param.cpp
+18
-19
src/operators/op_param.h
src/operators/op_param.h
+277
-279
src/operators/pool_op.cpp
src/operators/pool_op.cpp
+25
-25
src/operators/pool_op.h
src/operators/pool_op.h
+17
-17
src/platform/data_type.h
src/platform/data_type.h
+83
-83
src/platform/macros.h
src/platform/macros.h
+5
-5
test/common/test_log.cpp
test/common/test_log.cpp
+11
-11
test/framework/executor_for_test.cpp
test/framework/executor_for_test.cpp
+30
-30
test/framework/executor_for_test.h
test/framework/executor_for_test.h
+4
-4
test/framework/test_load.cpp
test/framework/test_load.cpp
+5
-5
test/framework/test_optimize.cpp
test/framework/test_optimize.cpp
+9
-9
test/operators/test_batchnorm_op.cpp
test/operators/test_batchnorm_op.cpp
+134
-138
test/operators/test_concat_op.cpp
test/operators/test_concat_op.cpp
+137
-141
test/operators/test_cov_op.cpp
test/operators/test_cov_op.cpp
+18
-18
test/operators/test_elementwise_add_op.cpp
test/operators/test_elementwise_add_op.cpp
+110
-113
test/operators/test_lrn_op.cpp
test/operators/test_lrn_op.cpp
+110
-115
test/operators/test_mul_op.cpp
test/operators/test_mul_op.cpp
+129
-131
test/operators/test_pool_op.cpp
test/operators/test_pool_op.cpp
+18
-18
test/test_helper.h
test/test_helper.h
+7
-8
未找到文件。
.clang-format
浏览文件 @
1ad8f821
...
...
@@ -2,5 +2,4 @@
Language: Cpp
BasedOnStyle: LLVM
Standard: Cpp11
IndentWidth: 4
...
cmake-build-release/compile_commands.json
已删除
100644 → 0
浏览文件 @
e35ef6fe
[
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/ddim.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/lod_tensor.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/scope.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/tensor_util.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/memory/t_malloc.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/im2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/math_function.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/vol2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/common/variant.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/attribute.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/block_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/data_transform.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/executor.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/framework.pb.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/op_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/operator.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/paddle_mobile_object.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/program.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/program_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/var_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/io.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/conv_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/elementwise_add_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/elementwise_add_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/mul_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/fpga/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/mul_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/op_param.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/ddim.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/lod_tensor.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/scope.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/tensor_util.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/memory/t_malloc.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/im2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/math_function.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/vol2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/common/variant.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/attribute.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/block_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/data_transform.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/executor.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/framework.pb.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/op_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/operator.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/paddle_mobile_object.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/program.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/program_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/var_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/io.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/conv_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/elementwise_add_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/elementwise_add_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/mul_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/fpga/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/mul_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/op_param.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-log.dir/common/test_log.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/common/test_log.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/test/common/test_log.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-conv-op.dir/operators/test_cov_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/operators/test_cov_op.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/test/operators/test_cov_op.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-load.dir/framework/test_load.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/framework/test_load.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/test/framework/test_load.cpp"
},
{
"directory"
:
"/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test"
,
"command"
:
"/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=
\\\"
true
\\\"
-I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-test.dir/main.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/main.cpp"
,
"file"
:
"/Users/allonli/Documents/workspace/paddle-mobile/test/main.cpp"
}
]
\ No newline at end of file
src/common/log.h
浏览文件 @
1ad8f821
...
...
@@ -56,7 +56,7 @@ struct Print {
return
*
this
;
}
private:
private:
void
print
(
LogLevel
level
)
{
buffer_
<<
std
::
endl
;
if
(
level
==
kLOG_ERROR
)
{
...
...
@@ -73,8 +73,7 @@ struct ToLog {
:
level_
(
level
)
{
unsigned
blanks
=
(
unsigned
)(
level
>
kLOG_DEBUG
?
(
level
-
kLOG_DEBUG
)
*
4
:
1
);
printer_
<<
logs
[
level
]
<<
" "
<<
info
<<
":"
<<
std
::
string
(
blanks
,
' '
);
printer_
<<
logs
[
level
]
<<
" "
<<
info
<<
":"
<<
std
::
string
(
blanks
,
' '
);
}
template
<
typename
T
>
ToLog
&
operator
<<
(
T
const
&
value
)
{
...
...
@@ -84,7 +83,7 @@ struct ToLog {
~
ToLog
()
{
printer_
.
print
(
level_
);
}
private:
private:
LogLevel
level_
;
Print
printer_
;
};
...
...
@@ -93,10 +92,10 @@ struct ToLog {
if (level > paddle_mobile::log_level) { \
} else \
paddle_mobile::ToLog( \
level, (std::stringstream() \
level, \
(std::stringstream() \
<< "[file: " \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) \
: __FILE__) \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) : __FILE__) \
<< "] [line: " << __LINE__ << "] ") \
.str())
...
...
@@ -107,8 +106,7 @@ struct ToLog {
paddle_mobile::kLOG_DEBUG, \
(std::stringstream() \
<< "[file: " \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) \
: __FILE__) \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) : __FILE__) \
<< "] [line: " << __LINE__ << "] ") \
.str())
}
// namespace paddle_mobile
...
...
@@ -144,7 +142,7 @@ struct Print {
friend
struct
ToLog
;
template
<
typename
T
>
Print
&
operator
<<
(
T
const
&
value
)
{}
private:
private:
};
struct
ToLog
{
...
...
src/common/types.h
浏览文件 @
1ad8f821
src/common/variant.h
浏览文件 @
1ad8f821
...
...
@@ -49,7 +49,7 @@ template <typename F> struct VariantHelper<F> {
};
template
<
size_t
size
>
class
RawData
{
public:
public:
char
data
[
size
];
RawData
()
{}
RawData
(
const
RawData
&
raw_data
)
{
strcpy
(
data
,
raw_data
.
data
);
}
...
...
@@ -87,7 +87,7 @@ template <typename... Ts> struct Variant {
size_t
TypeId
()
const
{
return
type_id
;
}
private:
private:
static
inline
size_t
invalid_type
()
{
return
typeid
(
void
).
hash_code
();
}
typedef
VariantHelper
<
Ts
...
>
helper
;
size_t
type_id
;
...
...
src/framework/attribute.h
浏览文件 @
1ad8f821
...
...
@@ -27,7 +27,7 @@ namespace framework {
class
BlockDesc
;
class
Attribute
{
public:
public:
static
Attribute
GetAttrValue
(
const
proto
::
OpDesc
::
Attr
&
attr_desc
)
{
// std::cout << "begin get attr value" << std::endl;
Attribute
attr
;
...
...
@@ -100,7 +100,7 @@ class Attribute {
template
<
typename
T
>
T
&
Get
()
const
{
return
variant_
.
Get
<
T
>
();
}
private:
private:
Variant
<
int
,
float
,
std
::
string
,
std
::
vector
<
int
>
,
std
::
vector
<
float
>
,
std
::
vector
<
std
::
string
>
,
bool
,
std
::
vector
<
bool
>
,
BlockDesc
*
,
int64_t
>
...
...
@@ -110,7 +110,7 @@ class Attribute {
using
AttributeMap
=
std
::
unordered_map
<
std
::
string
,
Attribute
>
;
class
AttrReader
{
public:
public:
explicit
AttrReader
(
const
AttributeMap
&
attrs
)
:
attrs_
(
attrs
)
{}
template
<
typename
T
>
inline
T
Get
(
const
std
::
string
&
name
)
const
{
...
...
@@ -121,7 +121,7 @@ class AttrReader {
return
((
Attribute
)
attrs_
.
at
(
name
)).
Get
<
T
>
();
}
private:
private:
const
AttributeMap
&
attrs_
;
};
...
...
src/framework/block_desc.cpp
浏览文件 @
1ad8f821
src/framework/block_desc.h
浏览文件 @
1ad8f821
...
...
@@ -27,7 +27,7 @@ namespace paddle_mobile {
namespace
framework
{
class
BlockDesc
:
PaddleMobileObject
{
public:
public:
BlockDesc
(
const
proto
::
BlockDesc
&
desc
);
const
int
&
ID
()
const
{
return
desc_
.
idx
();
}
...
...
@@ -35,8 +35,7 @@ class BlockDesc : PaddleMobileObject {
const
int
&
Parent
()
const
{
return
desc_
.
parent_idx
();
}
bool
operator
==
(
const
paddle_mobile
::
framework
::
BlockDesc
&
in_block
)
const
{
return
this
->
ID
()
==
in_block
.
ID
()
&&
this
->
Parent
()
==
in_block
.
Parent
();
return
this
->
ID
()
==
in_block
.
ID
()
&&
this
->
Parent
()
==
in_block
.
Parent
();
}
bool
operator
<
(
const
paddle_mobile
::
framework
::
BlockDesc
&
in_block
)
const
{
...
...
@@ -46,7 +45,7 @@ class BlockDesc : PaddleMobileObject {
std
::
vector
<
std
::
shared_ptr
<
VarDesc
>>
Vars
()
const
;
std
::
vector
<
std
::
shared_ptr
<
OpDesc
>>
Ops
()
const
;
private:
private:
proto
::
BlockDesc
desc_
;
std
::
vector
<
std
::
shared_ptr
<
OpDesc
>>
ops_
;
std
::
unordered_map
<
std
::
string
,
std
::
shared_ptr
<
VarDesc
>>
vars_
;
...
...
src/framework/data_layout.h
浏览文件 @
1ad8f821
src/framework/data_transform.cpp
浏览文件 @
1ad8f821
src/framework/ddim.cc
浏览文件 @
1ad8f821
...
...
@@ -90,26 +90,24 @@ DDim make_ddim(const std::vector<int> &dims) {
// XXX For some reason, putting this in an anonymous namespace causes
// errors
struct
DynamicMutableIndexer
:
Vistor
<
int64_t
&>
{
public:
public:
explicit
DynamicMutableIndexer
(
int
idx
)
:
idx_
(
idx
)
{}
template
<
int
D
>
int64_t
&
operator
()(
Dim
<
D
>
&
dim
)
const
{
return
dim
[
idx_
];
}
template
<
int
D
>
int64_t
&
operator
()(
Dim
<
D
>
&
dim
)
const
{
return
dim
[
idx_
];
}
private:
private:
int
idx_
;
};
struct
DynamicConstIndexer
:
public
Vistor
<
int64_t
>
{
public:
public:
explicit
DynamicConstIndexer
(
int
idx
)
:
idx_
(
idx
)
{}
template
<
int
D
>
int64_t
operator
()(
const
Dim
<
D
>
&
dim
)
const
{
return
dim
[
idx_
];
}
private:
private:
int
idx_
;
};
...
...
@@ -288,7 +286,7 @@ struct OSVistor : Vistor<std::ostream &> {
return
os_
<<
dim
;
}
private:
private:
std
::
ostream
&
os_
;
};
...
...
src/framework/ddim.h
浏览文件 @
1ad8f821
src/framework/dim.h
浏览文件 @
1ad8f821
...
...
@@ -123,9 +123,7 @@ template <> struct DimGetter<0> {
return
d
.
head
;
}
// Return a reference if Dim is mutable
template
<
typename
D
>
HOSTDEVICE
static
int64_t
&
impl
(
D
&
d
)
{
return
d
.
head
;
}
template
<
typename
D
>
HOSTDEVICE
static
int64_t
&
impl
(
D
&
d
)
{
return
d
.
head
;
}
};
template
<
int
D
>
HOSTDEVICE
int64_t
&
indexer
(
Dim
<
D
>
&
dim
,
int
idx
)
{
...
...
src/framework/executor.cpp
浏览文件 @
1ad8f821
src/framework/executor.h
浏览文件 @
1ad8f821
...
...
@@ -35,14 +35,14 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
Executor
{
public:
public:
Executor
();
Executor
(
const
Program
<
Dtype
>
p
);
std
::
shared_ptr
<
Tensor
>
predict
(
Tensor
&
t
);
public:
public:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
...
...
src/framework/framework.pb.cpp
浏览文件 @
1ad8f821
...
...
@@ -18,74 +18,73 @@ namespace paddle_mobile {
namespace
framework
{
namespace
proto
{
class
OpDesc_AttrDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpDesc_Attr
>
_instance
;
}
_OpDesc_Attr_default_instance_
;
class
OpDesc_VarDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpDesc_Var
>
_instance
;
}
_OpDesc_Var_default_instance_
;
class
OpDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpDesc
>
_instance
;
}
_OpDesc_default_instance_
;
class
OpProto_VarDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpProto_Var
>
_instance
;
}
_OpProto_Var_default_instance_
;
class
OpProto_AttrDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpProto_Attr
>
_instance
;
}
_OpProto_Attr_default_instance_
;
class
OpProtoDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
OpProto
>
_instance
;
}
_OpProto_default_instance_
;
class
VarType_TensorDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_TensorDesc
>
_instance
;
}
_VarType_TensorDesc_default_instance_
;
class
VarType_LoDTensorDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_LoDTensorDesc
>
_instance
;
}
_VarType_LoDTensorDesc_default_instance_
;
class
VarType_LoDTensorArrayDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_LoDTensorArrayDesc
>
_instance
;
}
_VarType_LoDTensorArrayDesc_default_instance_
;
class
VarType_ReaderDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_ReaderDesc
>
_instance
;
}
_VarType_ReaderDesc_default_instance_
;
class
VarType_ChannelDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_ChannelDesc
>
_instance
;
}
_VarType_ChannelDesc_default_instance_
;
class
VarType_TupleDefaultTypeInternal
{
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_Tuple
>
_instance
;
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType_Tuple
>
_instance
;
}
_VarType_Tuple_default_instance_
;
class
VarTypeDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarType
>
_instance
;
}
_VarType_default_instance_
;
class
VarDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
VarDesc
>
_instance
;
}
_VarDesc_default_instance_
;
class
BlockDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
BlockDesc
>
_instance
;
}
_BlockDesc_default_instance_
;
class
ProgramDescDefaultTypeInternal
{
public:
public:
::
google
::
protobuf
::
internal
::
ExplicitlyConstructed
<
ProgramDesc
>
_instance
;
}
_ProgramDesc_default_instance_
;
...
...
@@ -207,10 +206,9 @@ void TableStruct::InitDefaultsImpl() {
const_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*>
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
::
internal_default_instance
());
_VarDesc_default_instance_
.
_instance
.
get_mutable
()
->
type_
=
const_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType
*>
(
::
paddle_mobile
::
framework
::
proto
::
VarType
::
internal_default_instance
());
_VarDesc_default_instance_
.
_instance
.
get_mutable
()
->
type_
=
const_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType
*>
(
::
paddle_mobile
::
framework
::
proto
::
VarType
::
internal_default_instance
());
}
void
InitDefaults
()
{
...
...
@@ -338,8 +336,7 @@ OpDesc_Attr::OpDesc_Attr(const OpDesc_Attr &from)
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
());
if
(
from
.
has_s
())
{
s_
.
AssignWithDefault
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
from
.
s_
);
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
from
.
s_
);
}
::
memcpy
(
&
type_
,
&
from
.
type_
,
static_cast
<
size_t
>
(
reinterpret_cast
<
char
*>
(
&
block_idx_
)
-
...
...
@@ -443,8 +440,8 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string name = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -463,15 +460,12 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
AttrType_IsValid
(
value
))
{
set_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
AttrType
>
(
value
));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
AttrType_IsValid
(
value
))
{
set_type
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
AttrType
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
16u
);
unknown_fields_stream
.
WriteVarint32
(
...
...
@@ -488,11 +482,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
24u
/* 24 & 0xFF */
))
{
set_has_i
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
i_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
i_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -504,10 +497,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
37u
/* 37 & 0xFF */
))
{
set_has_f
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
input
,
&
f_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
input
,
&
f_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -530,19 +522,18 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case
6
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
48u
/* 48 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
1
,
48u
,
input
,
this
->
mutable_ints
())));
}
else
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
50u
/* 50 & 0xFF */
))
{
static_cast
<::
google
::
protobuf
::
uint8
>
(
50u
/* 50 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPackedPrimitiveNoInline
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
this
->
mutable_ints
())));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
this
->
mutable_ints
())));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -553,18 +544,17 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case
7
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
61u
/* 61 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
1
,
61u
,
input
,
this
->
mutable_floats
())));
}
else
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
58u
/* 58 & 0xFF */
))
{
static_cast
<::
google
::
protobuf
::
uint8
>
(
58u
/* 58 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPackedPrimitiveNoInline
<
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
float
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_FLOAT
>
(
input
,
this
->
mutable_floats
())));
}
else
{
goto
handle_unusual
;
...
...
@@ -589,10 +579,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
80u
/* 80 & 0xFF */
))
{
set_has_b
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
b_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
b_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -603,18 +592,16 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case
11
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
88u
/* 88 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
1
,
88u
,
input
,
this
->
mutable_bools
())));
}
else
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
90u
/* 90 & 0xFF */
))
{
static_cast
<::
google
::
protobuf
::
uint8
>
(
90u
/* 90 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPackedPrimitiveNoInline
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
this
->
mutable_bools
())));
}
else
{
goto
handle_unusual
;
...
...
@@ -627,11 +614,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
96u
/* 96 & 0xFF */
))
{
set_has_block_idx
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
block_idx_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
block_idx_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -643,11 +629,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
104u
/* 104 & 0xFF */
))
{
set_has_l
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int64
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
&
l_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
&
l_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -713,14 +698,14 @@ void OpDesc_Attr::SerializeWithCachedSizes(
// repeated int32 ints = 6;
for
(
int
i
=
0
,
n
=
this
->
ints_size
();
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteInt32
(
6
,
this
->
ints
(
i
),
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteInt32
(
6
,
this
->
ints
(
i
),
output
);
}
// repeated float floats = 7;
for
(
int
i
=
0
,
n
=
this
->
floats_size
();
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteFloat
(
7
,
this
->
floats
(
i
),
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteFloat
(
7
,
this
->
floats
(
i
),
output
);
}
// repeated string strings = 8;
...
...
@@ -737,8 +722,8 @@ void OpDesc_Attr::SerializeWithCachedSizes(
// repeated bool bools = 11;
for
(
int
i
=
0
,
n
=
this
->
bools_size
();
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteBool
(
11
,
this
->
bools
(
i
),
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteBool
(
11
,
this
->
bools
(
i
),
output
);
}
// optional int32 block_idx = 12;
...
...
@@ -765,16 +750,14 @@ size_t OpDesc_Attr::RequiredFieldsByteSizeFallback() const {
if
(
has_name
())
{
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
}
if
(
has_type
())
{
// required .paddle_mobile.framework.proto.AttrType type =
// 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
type
());
}
...
...
@@ -789,14 +772,12 @@ size_t OpDesc_Attr::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000005
)
^
0x00000005
)
==
0
)
{
// All required fields are present.
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
// required .paddle_mobile.framework.proto.AttrType type =
// 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
type
());
}
else
{
...
...
@@ -805,8 +786,7 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// repeated int32 ints = 6;
{
size_t
data_size
=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
ints_
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
ints_
);
total_size
+=
1
*
::
google
::
protobuf
::
internal
::
FromIntSize
(
this
->
ints_size
());
total_size
+=
data_size
;
...
...
@@ -841,15 +821,13 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// optional string s = 5;
if
(
has_s
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
s
());
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
s
());
}
if
(
_has_bits_
[
0
/
32
]
&
248u
)
{
// optional int32 i = 3;
if
(
has_i
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
i
());
}
...
...
@@ -865,15 +843,13 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// optional int64 l = 13;
if
(
has_l
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
this
->
l
());
}
// optional int32 block_idx = 12;
if
(
has_block_idx
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
block_idx
());
}
}
...
...
@@ -998,15 +974,14 @@ const ::std::string &OpDesc_Attr::name() const {
}
void
OpDesc_Attr
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
#if LANG_CXX11
void
OpDesc_Attr
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
...
...
@@ -1014,15 +989,13 @@ void OpDesc_Attr::set_name(::std::string &&value) {
void
OpDesc_Attr
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
void
OpDesc_Attr
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
...
...
@@ -1246,8 +1219,7 @@ void OpDesc_Attr::set_strings(int index, const char *value) {
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.strings)
}
void
OpDesc_Attr
::
set_strings
(
int
index
,
const
char
*
value
,
size_t
size
)
{
strings_
.
Mutable
(
index
)
->
assign
(
reinterpret_cast
<
const
char
*>
(
value
),
size
);
strings_
.
Mutable
(
index
)
->
assign
(
reinterpret_cast
<
const
char
*>
(
value
),
size
);
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings)
}
::
std
::
string
*
OpDesc_Attr
::
add_strings
()
{
...
...
@@ -1466,8 +1438,8 @@ bool OpDesc_Var::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string parameter = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -1545,8 +1517,7 @@ size_t OpDesc_Var::ByteSizeLong() const {
// required string parameter = 1;
if
(
has_parameter
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
parameter
());
}
// repeated string arguments = 2;
...
...
@@ -1684,8 +1655,7 @@ void OpDesc_Var::set_allocated_parameter(::std::string *parameter) {
clear_has_parameter
();
}
parameter_
.
SetAllocatedNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
parameter
);
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
parameter
);
// @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter)
}
...
...
@@ -1864,15 +1834,15 @@ bool OpDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// repeated .paddle_mobile.framework.proto.OpDesc.Var inputs
// = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_inputs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_inputs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -1884,8 +1854,8 @@ bool OpDesc::MergePartialFromCodedStream(
case
2
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_outputs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_outputs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -1909,8 +1879,8 @@ bool OpDesc::MergePartialFromCodedStream(
case
4
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
34u
/* 34 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_attrs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_attrs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -1922,10 +1892,9 @@ bool OpDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
40u
/* 40 & 0xFF */
))
{
set_has_is_target
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
is_target_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
is_target_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -1968,8 +1937,7 @@ void OpDesc::SerializeWithCachedSizes(
// repeated .paddle_mobile.framework.proto.OpDesc.Var outputs =
// 2;
for
(
unsigned
int
i
=
0
,
n
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
for
(
unsigned
int
i
=
0
,
n
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
2
,
this
->
outputs
(
static_cast
<
int
>
(
i
)),
output
);
...
...
@@ -2010,8 +1978,7 @@ size_t OpDesc::ByteSizeLong() const {
// required string type = 3;
if
(
has_type
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
type
());
}
// repeated .paddle_mobile.framework.proto.OpDesc.Var inputs =
...
...
@@ -2020,8 +1987,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
inputs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
inputs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
inputs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -2031,8 +1999,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
outputs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
outputs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -2042,8 +2011,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
attrs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
attrs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
attrs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -2148,15 +2118,14 @@ const ::std::string &OpDesc::type() const {
}
void
OpDesc
::
set_type
(
const
::
std
::
string
&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type)
}
#if LANG_CXX11
void
OpDesc
::
set_type
(
::
std
::
string
&&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type)
}
...
...
@@ -2164,15 +2133,13 @@ void OpDesc::set_type(::std::string &&value) {
void
OpDesc
::
set_type
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type)
}
void
OpDesc
::
set_type
(
const
char
*
value
,
size_t
size
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type)
}
...
...
@@ -2445,8 +2412,8 @@ bool OpProto_Var::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string name = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -2476,10 +2443,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
24u
/* 24 & 0xFF */
))
{
set_has_duplicable
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
duplicable_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
duplicable_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -2491,10 +2457,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
32u
/* 32 & 0xFF */
))
{
set_has_intermediate
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
intermediate_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
intermediate_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -2506,10 +2471,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
40u
/* 40 & 0xFF */
))
{
set_has_dispensable
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
dispensable_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
dispensable_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -2585,15 +2549,13 @@ size_t OpProto_Var::RequiredFieldsByteSizeFallback() const {
if
(
has_name
())
{
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
}
if
(
has_comment
())
{
// required string comment = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
}
...
...
@@ -2608,13 +2570,11 @@ size_t OpProto_Var::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000003
)
^
0x00000003
)
==
0
)
{
// All required fields are present.
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
// required string comment = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
}
else
{
...
...
@@ -2737,15 +2697,14 @@ const ::std::string &OpProto_Var::name() const {
}
void
OpProto_Var
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name)
}
#if LANG_CXX11
void
OpProto_Var
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name)
}
...
...
@@ -2753,15 +2712,13 @@ void OpProto_Var::set_name(::std::string &&value) {
void
OpProto_Var
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name)
}
void
OpProto_Var
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name)
}
...
...
@@ -3051,8 +3008,8 @@ bool OpProto_Attr::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string name = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -3071,15 +3028,12 @@ bool OpProto_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
AttrType_IsValid
(
value
))
{
set_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
AttrType
>
(
value
));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
AttrType_IsValid
(
value
))
{
set_type
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
AttrType
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
16u
);
unknown_fields_stream
.
WriteVarint32
(
...
...
@@ -3108,10 +3062,9 @@ bool OpProto_Attr::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
32u
/* 32 & 0xFF */
))
{
set_has_generated
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
generated_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
generated_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -3181,23 +3134,20 @@ size_t OpProto_Attr::RequiredFieldsByteSizeFallback() const {
if
(
has_name
())
{
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
}
if
(
has_comment
())
{
// required string comment = 3;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
}
if
(
has_type
())
{
// required .paddle_mobile.framework.proto.AttrType type =
// 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
type
());
}
...
...
@@ -3212,19 +3162,16 @@ size_t OpProto_Attr::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000007
)
^
0x00000007
)
==
0
)
{
// All required fields are present.
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
// required string comment = 3;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
// required .paddle_mobile.framework.proto.AttrType type =
// 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
type
());
}
else
{
...
...
@@ -3332,15 +3279,14 @@ const ::std::string &OpProto_Attr::name() const {
}
void
OpProto_Attr
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name)
}
#if LANG_CXX11
void
OpProto_Attr
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name)
}
...
...
@@ -3348,15 +3294,13 @@ void OpProto_Attr::set_name(::std::string &&value) {
void
OpProto_Attr
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name)
}
void
OpProto_Attr
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name)
}
...
...
@@ -3618,8 +3562,8 @@ bool OpProto::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string type = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -3637,8 +3581,8 @@ bool OpProto::MergePartialFromCodedStream(
case
2
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_inputs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_inputs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -3650,8 +3594,8 @@ bool OpProto::MergePartialFromCodedStream(
case
3
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
26u
/* 26 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_outputs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_outputs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -3663,8 +3607,8 @@ bool OpProto::MergePartialFromCodedStream(
case
4
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
34u
/* 34 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_attrs
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_attrs
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -3726,8 +3670,7 @@ void OpProto::SerializeWithCachedSizes(
// repeated .paddle_mobile.framework.proto.OpProto.Var outputs =
// 3;
for
(
unsigned
int
i
=
0
,
n
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
for
(
unsigned
int
i
=
0
,
n
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
3
,
this
->
outputs
(
static_cast
<
int
>
(
i
)),
output
);
...
...
@@ -3759,15 +3702,13 @@ size_t OpProto::RequiredFieldsByteSizeFallback() const {
if
(
has_type
())
{
// required string type = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
type
());
}
if
(
has_comment
())
{
// required string comment = 5;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
}
...
...
@@ -3782,13 +3723,11 @@ size_t OpProto::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000003
)
^
0x00000003
)
==
0
)
{
// All required fields are present.
// required string type = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
type
());
// required string comment = 5;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
comment
());
}
else
{
...
...
@@ -3800,8 +3739,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
inputs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
inputs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
inputs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -3811,8 +3751,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
outputs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
outputs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
outputs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -3822,8 +3763,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
attrs_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
attrs
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
attrs
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -3925,15 +3867,14 @@ const ::std::string &OpProto::type() const {
}
void
OpProto
::
set_type
(
const
::
std
::
string
&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type)
}
#if LANG_CXX11
void
OpProto
::
set_type
(
::
std
::
string
&&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type)
}
...
...
@@ -3941,15 +3882,13 @@ void OpProto::set_type(::std::string &&value) {
void
OpProto
::
set_type
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type)
}
void
OpProto
::
set_type
(
const
char
*
value
,
size_t
size
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type)
}
...
...
@@ -4220,23 +4159,20 @@ bool VarType_TensorDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
8u
/* 8 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
set_data_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
8u
);
...
...
@@ -4253,19 +4189,18 @@ bool VarType_TensorDesc::MergePartialFromCodedStream(
case
2
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<::
google
::
protobuf
::
int64
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadRepeatedPrimitive
<
::
google
::
protobuf
::
int64
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
1
,
16u
,
input
,
this
->
mutable_dims
())));
}
else
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPackedPrimitiveNoInline
<
::
google
::
protobuf
::
int64
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
this
->
mutable_dims
())));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
this
->
mutable_dims
())));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -4308,8 +4243,8 @@ void VarType_TensorDesc::SerializeWithCachedSizes(
// repeated int64 dims = 2;
for
(
int
i
=
0
,
n
=
this
->
dims_size
();
i
<
n
;
i
++
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteInt64
(
2
,
this
->
dims
(
i
),
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteInt64
(
2
,
this
->
dims
(
i
),
output
);
}
output
->
WriteRaw
(
...
...
@@ -4327,15 +4262,13 @@ size_t VarType_TensorDesc::ByteSizeLong() const {
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
if
(
has_data_type
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
data_type
());
}
// repeated int64 dims = 2;
{
size_t
data_size
=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
this
->
dims_
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
this
->
dims_
);
total_size
+=
1
*
::
google
::
protobuf
::
internal
::
FromIntSize
(
this
->
dims_size
());
total_size
+=
data_size
;
...
...
@@ -4350,8 +4283,7 @@ size_t VarType_TensorDesc::ByteSizeLong() const {
void
VarType_TensorDesc
::
CheckTypeAndMergeFrom
(
const
::
google
::
protobuf
::
MessageLite
&
from
)
{
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_TensorDesc
*>
(
&
from
));
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_TensorDesc
*>
(
&
from
));
}
void
VarType_TensorDesc
::
MergeFrom
(
const
VarType_TensorDesc
&
from
)
{
...
...
@@ -4558,16 +4490,16 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required
// .paddle_mobile.framework.proto.VarType.TensorDesc tensor
// = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -4579,11 +4511,10 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
set_has_lod_level
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
lod_level_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
lod_level_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -4646,14 +4577,12 @@ size_t VarType_LoDTensorDesc::ByteSizeLong() const {
// tensor = 1;
if
(
has_tensor
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tensor_
);
}
// optional int32 lod_level = 2 [default = 0];
if
(
has_lod_level
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
lod_level
());
}
...
...
@@ -4680,8 +4609,9 @@ void VarType_LoDTensorDesc::MergeFrom(const VarType_LoDTensorDesc &from) {
cached_has_bits
=
from
.
_has_bits_
[
0
];
if
(
cached_has_bits
&
3u
)
{
if
(
cached_has_bits
&
0x00000001u
)
{
mutable_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
tensor
());
mutable_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
tensor
());
}
if
(
cached_has_bits
&
0x00000002u
)
{
lod_level_
=
from
.
lod_level_
;
...
...
@@ -4912,16 +4842,16 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required
// .paddle_mobile.framework.proto.VarType.TensorDesc tensor
// = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -4933,11 +4863,10 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
set_has_lod_level
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
lod_level_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
lod_level_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -5000,14 +4929,12 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const {
// tensor = 1;
if
(
has_tensor
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tensor_
);
}
// optional int32 lod_level = 2 [default = 0];
if
(
has_lod_level
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
lod_level
());
}
...
...
@@ -5020,8 +4947,7 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const {
void
VarType_LoDTensorArrayDesc
::
CheckTypeAndMergeFrom
(
const
::
google
::
protobuf
::
MessageLite
&
from
)
{
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_LoDTensorArrayDesc
*>
(
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_LoDTensorArrayDesc
*>
(
&
from
));
}
...
...
@@ -5036,8 +4962,9 @@ void VarType_LoDTensorArrayDesc::MergeFrom(
cached_has_bits
=
from
.
_has_bits_
[
0
];
if
(
cached_has_bits
&
3u
)
{
if
(
cached_has_bits
&
0x00000001u
)
{
mutable_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
tensor
());
mutable_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
tensor
());
}
if
(
cached_has_bits
&
0x00000002u
)
{
lod_level_
=
from
.
lod_level_
;
...
...
@@ -5252,16 +5179,16 @@ bool VarType_ReaderDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// repeated
// .paddle_mobile.framework.proto.VarType.LoDTensorDesc
// lod_tensor = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_lod_tensor
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_lod_tensor
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -5323,8 +5250,9 @@ size_t VarType_ReaderDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
lod_tensor_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
lod_tensor
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
lod_tensor
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -5337,8 +5265,7 @@ size_t VarType_ReaderDesc::ByteSizeLong() const {
void
VarType_ReaderDesc
::
CheckTypeAndMergeFrom
(
const
::
google
::
protobuf
::
MessageLite
&
from
)
{
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_ReaderDesc
*>
(
&
from
));
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_ReaderDesc
*>
(
&
from
));
}
void
VarType_ReaderDesc
::
MergeFrom
(
const
VarType_ReaderDesc
&
from
)
{
...
...
@@ -5516,23 +5443,20 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
8u
/* 8 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
set_data_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
8u
);
...
...
@@ -5550,11 +5474,10 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
set_has_capacity
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int64
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
&
capacity_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT64
>
(
input
,
&
capacity_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -5613,16 +5536,14 @@ size_t VarType_ChannelDesc::RequiredFieldsByteSizeFallback() const {
if
(
has_capacity
())
{
// required int64 capacity = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
this
->
capacity
());
}
if
(
has_data_type
())
{
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
data_type
());
}
...
...
@@ -5637,14 +5558,12 @@ size_t VarType_ChannelDesc::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000003
)
^
0x00000003
)
==
0
)
{
// All required fields are present.
// required int64 capacity = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int64Size
(
this
->
capacity
());
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
data_type
());
}
else
{
...
...
@@ -5659,8 +5578,7 @@ size_t VarType_ChannelDesc::ByteSizeLong() const {
void
VarType_ChannelDesc
::
CheckTypeAndMergeFrom
(
const
::
google
::
protobuf
::
MessageLite
&
from
)
{
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_ChannelDesc
*>
(
&
from
));
MergeFrom
(
*::
google
::
protobuf
::
down_cast
<
const
VarType_ChannelDesc
*>
(
&
from
));
}
void
VarType_ChannelDesc
::
MergeFrom
(
const
VarType_ChannelDesc
&
from
)
{
...
...
@@ -5848,23 +5766,20 @@ bool VarType_Tuple::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// repeated .paddle_mobile.framework.proto.VarType.Type
// element_type = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
8u
/* 8 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
add_element_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
tag
);
...
...
@@ -5872,15 +5787,12 @@ bool VarType_Tuple::MergePartialFromCodedStream(
static_cast
<::
google
::
protobuf
::
uint32
>
(
value
));
}
}
else
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPackedEnumPreserveUnknowns
(
input
,
1
,
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
,
&
unknown_fields_stream
,
this
->
mutable_element_type
())));
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
,
&
unknown_fields_stream
,
this
->
mutable_element_type
())));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -5936,8 +5848,7 @@ size_t VarType_Tuple::ByteSizeLong() const {
// element_type = 1;
{
size_t
data_size
=
0
;
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
element_type_size
());
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
element_type_size
());
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
data_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
element_type
(
static_cast
<
int
>
(
i
)));
...
...
@@ -6056,15 +5967,13 @@ VarType::VarType(const VarType &from)
_has_bits_
(
from
.
_has_bits_
),
_cached_size_
(
0
)
{
_internal_metadata_
.
MergeFrom
(
from
.
_internal_metadata_
);
if
(
from
.
has_selected_rows
())
{
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
(
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
(
*
from
.
selected_rows_
);
}
else
{
selected_rows_
=
NULL
;
}
if
(
from
.
has_lod_tensor
())
{
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
(
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
(
*
from
.
lod_tensor_
);
}
else
{
lod_tensor_
=
NULL
;
...
...
@@ -6089,8 +5998,7 @@ VarType::VarType(const VarType &from)
channel_
=
NULL
;
}
if
(
from
.
has_tuple
())
{
tuple_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
(
*
from
.
tuple_
);
tuple_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
(
*
from
.
tuple_
);
}
else
{
tuple_
=
NULL
;
}
...
...
@@ -6154,13 +6062,13 @@ void VarType::Clear() {
if
(
cached_has_bits
&
63u
)
{
if
(
cached_has_bits
&
0x00000001u
)
{
GOOGLE_DCHECK
(
selected_rows_
!=
NULL
);
selected_rows_
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
Clear
();
selected_rows_
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
Clear
();
}
if
(
cached_has_bits
&
0x00000002u
)
{
GOOGLE_DCHECK
(
lod_tensor_
!=
NULL
);
lod_tensor_
->::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
::
Clear
();
lod_tensor_
->::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
::
Clear
();
}
if
(
cached_has_bits
&
0x00000004u
)
{
GOOGLE_DCHECK
(
tensor_array_
!=
NULL
);
...
...
@@ -6169,13 +6077,11 @@ void VarType::Clear() {
}
if
(
cached_has_bits
&
0x00000008u
)
{
GOOGLE_DCHECK
(
reader_
!=
NULL
);
reader_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ReaderDesc
::
Clear
();
reader_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ReaderDesc
::
Clear
();
}
if
(
cached_has_bits
&
0x00000010u
)
{
GOOGLE_DCHECK
(
channel_
!=
NULL
);
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
}
if
(
cached_has_bits
&
0x00000020u
)
{
GOOGLE_DCHECK
(
tuple_
!=
NULL
);
...
...
@@ -6207,22 +6113,19 @@ bool VarType::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required .paddle_mobile.framework.proto.VarType.Type type
// = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
8u
/* 8 & 0xFF */
))
{
int
value
;
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
set_type
(
static_cast
<
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
int
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_ENUM
>
(
input
,
&
value
)));
if
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type_IsValid
(
value
))
{
set_type
(
static_cast
<::
paddle_mobile
::
framework
::
proto
::
VarType_Type
>
(
value
));
}
else
{
unknown_fields_stream
.
WriteVarint32
(
8u
);
...
...
@@ -6241,8 +6144,8 @@ bool VarType::MergePartialFromCodedStream(
case
2
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_selected_rows
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_selected_rows
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6255,8 +6158,8 @@ bool VarType::MergePartialFromCodedStream(
case
3
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
26u
/* 26 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_lod_tensor
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_lod_tensor
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6269,8 +6172,8 @@ bool VarType::MergePartialFromCodedStream(
case
4
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
34u
/* 34 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor_array
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tensor_array
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6283,8 +6186,8 @@ bool VarType::MergePartialFromCodedStream(
case
5
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
42u
/* 42 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_reader
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_reader
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6298,8 +6201,8 @@ bool VarType::MergePartialFromCodedStream(
case
6
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
50u
/* 50 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_channel
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_channel
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6311,8 +6214,8 @@ bool VarType::MergePartialFromCodedStream(
case
7
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
58u
/* 58 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tuple
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_tuple
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -6394,8 +6297,8 @@ void VarType::SerializeWithCachedSizes(
// optional .paddle_mobile.framework.proto.VarType.Tuple tuple =
// 7;
if
(
cached_has_bits
&
0x00000020u
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
7
,
*
this
->
tuple_
,
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
7
,
*
this
->
tuple_
,
output
);
}
output
->
WriteRaw
(
...
...
@@ -6413,8 +6316,7 @@ size_t VarType::ByteSizeLong() const {
// required .paddle_mobile.framework.proto.VarType.Type type =
// 1;
if
(
has_type
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
EnumSize
(
this
->
type
());
}
if
(
_has_bits_
[
0
/
32
]
&
63u
)
{
...
...
@@ -6423,8 +6325,10 @@ size_t VarType::ByteSizeLong() const {
// selected_rows
// = 2;
if
(
has_selected_rows
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
selected_rows_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
selected_rows_
);
}
// optional
...
...
@@ -6432,39 +6336,49 @@ size_t VarType::ByteSizeLong() const {
// lod_tensor
// = 3;
if
(
has_lod_tensor
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
lod_tensor_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
lod_tensor_
);
}
// optional
// .paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc
// tensor_array = 4;
if
(
has_tensor_array
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tensor_array_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tensor_array_
);
}
// optional
// .paddle_mobile.framework.proto.VarType.ReaderDesc reader
// = 5;
if
(
has_reader
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
reader_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
reader_
);
}
// optional
// .paddle_mobile.framework.proto.VarType.ChannelDesc
// channel = 6;
if
(
has_channel
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
channel_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
channel_
);
}
// optional .paddle_mobile.framework.proto.VarType.Tuple
// tuple = 7;
if
(
has_tuple
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tuple_
);
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
tuple_
);
}
}
int
cached_size
=
::
google
::
protobuf
::
internal
::
ToCachedSize
(
total_size
);
...
...
@@ -6489,24 +6403,28 @@ void VarType::MergeFrom(const VarType &from) {
cached_has_bits
=
from
.
_has_bits_
[
0
];
if
(
cached_has_bits
&
127u
)
{
if
(
cached_has_bits
&
0x00000001u
)
{
mutable_selected_rows
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
selected_rows
());
mutable_selected_rows
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
::
MergeFrom
(
from
.
selected_rows
());
}
if
(
cached_has_bits
&
0x00000002u
)
{
mutable_lod_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
::
MergeFrom
(
from
.
lod_tensor
());
mutable_lod_tensor
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
::
MergeFrom
(
from
.
lod_tensor
());
}
if
(
cached_has_bits
&
0x00000004u
)
{
mutable_tensor_array
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
::
MergeFrom
(
from
.
tensor_array
());
}
if
(
cached_has_bits
&
0x00000008u
)
{
mutable_reader
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_ReaderDesc
::
MergeFrom
(
from
.
reader
());
mutable_reader
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_ReaderDesc
::
MergeFrom
(
from
.
reader
());
}
if
(
cached_has_bits
&
0x00000010u
)
{
mutable_channel
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
MergeFrom
(
from
.
channel
());
mutable_channel
()
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
MergeFrom
(
from
.
channel
());
}
if
(
cached_has_bits
&
0x00000020u
)
{
mutable_tuple
()
...
...
@@ -6628,8 +6546,7 @@ VarType::selected_rows() const {
VarType
::
mutable_selected_rows
()
{
set_has_selected_rows
();
if
(
selected_rows_
==
NULL
)
{
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
;
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
;
}
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows)
return
selected_rows_
;
...
...
@@ -6638,8 +6555,7 @@ VarType::mutable_selected_rows() {
VarType
::
release_selected_rows
()
{
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows)
clear_has_selected_rows
();
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
*
temp
=
selected_rows_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
*
temp
=
selected_rows_
;
selected_rows_
=
NULL
;
return
temp
;
}
...
...
@@ -6674,8 +6590,8 @@ VarType::lod_tensor() const {
lod_tensor_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor)
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*>
(
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
_VarType_LoDTensorDesc_default_instance_
);
}
...
...
@@ -6683,8 +6599,7 @@ VarType::lod_tensor() const {
VarType
::
mutable_lod_tensor
()
{
set_has_lod_tensor
();
if
(
lod_tensor_
==
NULL
)
{
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
;
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
;
}
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor)
return
lod_tensor_
;
...
...
@@ -6693,8 +6608,7 @@ VarType::mutable_lod_tensor() {
VarType
::
release_lod_tensor
()
{
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor)
clear_has_lod_tensor
();
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*
temp
=
lod_tensor_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*
temp
=
lod_tensor_
;
lod_tensor_
=
NULL
;
return
temp
;
}
...
...
@@ -6729,8 +6643,7 @@ VarType::tensor_array() const {
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*
p
=
tensor_array_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array)
return
p
!=
NULL
?
*
p
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
...
...
@@ -6825,18 +6738,18 @@ void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; }
void
VarType
::
clear_has_channel
()
{
_has_bits_
[
0
]
&=
~
0x00000010u
;
}
void
VarType
::
clear_channel
()
{
if
(
channel_
!=
NULL
)
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
clear_has_channel
();
}
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
&
VarType
::
channel
()
const
{
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
p
=
channel_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel)
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
_VarType_ChannelDesc_default_instance_
);
}
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
...
...
@@ -7035,8 +6948,8 @@ bool VarDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required string name = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
...
...
@@ -7053,8 +6966,8 @@ bool VarDesc::MergePartialFromCodedStream(
case
2
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
18u
/* 18 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_type
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
mutable_type
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7066,10 +6979,9 @@ bool VarDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
24u
/* 24 & 0xFF */
))
{
set_has_persistable
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
persistable_
)));
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
bool
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_BOOL
>
(
input
,
&
persistable_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7111,8 +7023,8 @@ void VarDesc::SerializeWithCachedSizes(
// required .paddle_mobile.framework.proto.VarType type = 2;
if
(
cached_has_bits
&
0x00000002u
)
{
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
2
,
*
this
->
type_
,
output
);
::
google
::
protobuf
::
internal
::
WireFormatLite
::
WriteMessage
(
2
,
*
this
->
type_
,
output
);
}
// optional bool persistable = 3 [default = false];
...
...
@@ -7133,16 +7045,14 @@ size_t VarDesc::RequiredFieldsByteSizeFallback() const {
if
(
has_name
())
{
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
}
if
(
has_type
())
{
// required .paddle_mobile.framework.proto.VarType type = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
type_
);
}
...
...
@@ -7157,14 +7067,12 @@ size_t VarDesc::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000003
)
^
0x00000003
)
==
0
)
{
// All required fields are present.
// required string name = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
StringSize
(
this
->
name
());
// required .paddle_mobile.framework.proto.VarType type = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
*
this
->
type_
);
}
else
{
...
...
@@ -7203,8 +7111,7 @@ void VarDesc::MergeFrom(const VarDesc &from) {
from
.
name_
);
}
if
(
cached_has_bits
&
0x00000002u
)
{
mutable_type
()
->::
paddle_mobile
::
framework
::
proto
::
VarType
::
MergeFrom
(
mutable_type
()
->::
paddle_mobile
::
framework
::
proto
::
VarType
::
MergeFrom
(
from
.
type
());
}
if
(
cached_has_bits
&
0x00000004u
)
{
...
...
@@ -7269,15 +7176,14 @@ const ::std::string &VarDesc::name() const {
}
void
VarDesc
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name)
}
#if LANG_CXX11
void
VarDesc
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name)
}
...
...
@@ -7285,15 +7191,13 @@ void VarDesc::set_name(::std::string &&value) {
void
VarDesc
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name)
}
void
VarDesc
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name)
}
...
...
@@ -7491,18 +7395,17 @@ bool BlockDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// required int32 idx = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
8u
/* 8 & 0xFF */
))
{
set_has_idx
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
idx_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
idx_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7514,11 +7417,10 @@ bool BlockDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
16u
/* 16 & 0xFF */
))
{
set_has_parent_idx
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
parent_idx_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
parent_idx_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7529,8 +7431,8 @@ bool BlockDesc::MergePartialFromCodedStream(
case
3
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
26u
/* 26 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_vars
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_vars
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7541,8 +7443,8 @@ bool BlockDesc::MergePartialFromCodedStream(
case
4
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
34u
/* 34 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_ops
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_ops
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7554,11 +7456,10 @@ bool BlockDesc::MergePartialFromCodedStream(
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
40u
/* 40 & 0xFF */
))
{
set_has_forward_block_idx
();
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
DO_
((
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadPrimitive
<
::
google
::
protobuf
::
int32
,
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
forward_block_idx_
)));
::
google
::
protobuf
::
internal
::
WireFormatLite
::
TYPE_INT32
>
(
input
,
&
forward_block_idx_
)));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -7636,15 +7537,13 @@ size_t BlockDesc::RequiredFieldsByteSizeFallback() const {
if
(
has_idx
())
{
// required int32 idx = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
idx
());
}
if
(
has_parent_idx
())
{
// required int32 parent_idx = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
parent_idx
());
}
...
...
@@ -7659,13 +7558,11 @@ size_t BlockDesc::ByteSizeLong() const {
if
(((
_has_bits_
[
0
]
&
0x00000003
)
^
0x00000003
)
==
0
)
{
// All required fields are present.
// required int32 idx = 1;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
idx
());
// required int32 parent_idx = 2;
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
parent_idx
());
}
else
{
...
...
@@ -7676,8 +7573,9 @@ size_t BlockDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
vars_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
vars
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
vars
(
static_cast
<
int
>
(
i
)));
}
}
...
...
@@ -7686,15 +7584,15 @@ size_t BlockDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
ops_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
ops
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
ops
(
static_cast
<
int
>
(
i
)));
}
}
// optional int32 forward_block_idx = 5 [default = -1];
if
(
has_forward_block_idx
())
{
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
total_size
+=
1
+
::
google
::
protobuf
::
internal
::
WireFormatLite
::
Int32Size
(
this
->
forward_block_idx
());
}
...
...
@@ -7971,15 +7869,15 @@ bool ProgramDesc::MergePartialFromCodedStream(
tag
=
p
.
first
;
if
(
!
p
.
second
)
goto
handle_unusual
;
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
switch
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
GetTagFieldNumber
(
tag
))
{
// repeated .paddle_mobile.framework.proto.BlockDesc blocks
// = 1;
case
1
:
{
if
(
static_cast
<::
google
::
protobuf
::
uint8
>
(
tag
)
==
static_cast
<::
google
::
protobuf
::
uint8
>
(
10u
/* 10 & 0xFF */
))
{
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_blocks
()));
DO_
(
::
google
::
protobuf
::
internal
::
WireFormatLite
::
ReadMessageNoVirtual
(
input
,
add_blocks
()));
}
else
{
goto
handle_unusual
;
}
...
...
@@ -8036,8 +7934,9 @@ size_t ProgramDesc::ByteSizeLong() const {
unsigned
int
count
=
static_cast
<
unsigned
int
>
(
this
->
blocks_size
());
total_size
+=
1UL
*
count
;
for
(
unsigned
int
i
=
0
;
i
<
count
;
i
++
)
{
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
blocks
(
static_cast
<
int
>
(
i
)));
total_size
+=
::
google
::
protobuf
::
internal
::
WireFormatLite
::
MessageSizeNoVirtual
(
this
->
blocks
(
static_cast
<
int
>
(
i
)));
}
}
...
...
src/framework/framework.pb.h
浏览文件 @
1ad8f821
...
...
@@ -160,7 +160,7 @@ class OpDesc_Attr
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Attr)
*/
{
public:
public:
OpDesc_Attr
();
virtual
~
OpDesc_Attr
();
...
...
@@ -224,17 +224,17 @@ class OpDesc_Attr
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpDesc_Attr
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -252,8 +252,7 @@ class OpDesc_Attr
void
add_ints
(
::
google
::
protobuf
::
int32
value
);
const
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int32
>
&
ints
()
const
;
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int32
>
*
mutable_ints
();
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int32
>
*
mutable_ints
();
// repeated float floats = 7;
int
floats_size
()
const
;
...
...
@@ -370,7 +369,7 @@ class OpDesc_Attr
void
set_block_idx
(
::
google
::
protobuf
::
int32
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Attr)
private:
private:
void
set_has_name
();
void
clear_has_name
();
void
set_has_type
();
...
...
@@ -416,7 +415,7 @@ class OpDesc_Var
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Var)
*/
{
public:
public:
OpDesc_Var
();
virtual
~
OpDesc_Var
();
...
...
@@ -451,8 +450,7 @@ class OpDesc_Var
static
const
OpDesc_Var
&
default_instance
();
static
inline
const
OpDesc_Var
*
internal_default_instance
()
{
return
reinterpret_cast
<
const
OpDesc_Var
*>
(
&
_OpDesc_Var_default_instance_
);
return
reinterpret_cast
<
const
OpDesc_Var
*>
(
&
_OpDesc_Var_default_instance_
);
}
static
PROTOBUF_CONSTEXPR
int
const
kIndexInFileMessages
=
1
;
...
...
@@ -480,17 +478,17 @@ class OpDesc_Var
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpDesc_Var
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -518,8 +516,7 @@ class OpDesc_Var
#endif
void
add_arguments
(
const
char
*
value
);
void
add_arguments
(
const
char
*
value
,
size_t
size
);
const
::
google
::
protobuf
::
RepeatedPtrField
<::
std
::
string
>
&
arguments
()
const
;
const
::
google
::
protobuf
::
RepeatedPtrField
<::
std
::
string
>
&
arguments
()
const
;
::
google
::
protobuf
::
RepeatedPtrField
<::
std
::
string
>
*
mutable_arguments
();
// required string parameter = 1;
...
...
@@ -538,7 +535,7 @@ class OpDesc_Var
void
set_allocated_parameter
(
::
std
::
string
*
parameter
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Var)
private:
private:
void
set_has_parameter
();
void
clear_has_parameter
();
...
...
@@ -557,7 +554,7 @@ class OpDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc)
*/
{
public:
public:
OpDesc
();
virtual
~
OpDesc
();
...
...
@@ -618,17 +615,17 @@ class OpDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -645,8 +642,7 @@ class OpDesc
int
inputs_size
()
const
;
void
clear_inputs
();
static
const
int
kInputsFieldNumber
=
1
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
&
inputs
(
int
index
)
const
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
&
inputs
(
int
index
)
const
;
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
*
mutable_inputs
(
int
index
);
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
*
add_inputs
();
::
google
::
protobuf
::
RepeatedPtrField
<
...
...
@@ -661,8 +657,7 @@ class OpDesc
int
outputs_size
()
const
;
void
clear_outputs
();
static
const
int
kOutputsFieldNumber
=
2
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
&
outputs
(
int
index
)
const
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
&
outputs
(
int
index
)
const
;
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
*
mutable_outputs
(
int
index
);
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Var
*
add_outputs
();
::
google
::
protobuf
::
RepeatedPtrField
<
...
...
@@ -677,8 +672,7 @@ class OpDesc
int
attrs_size
()
const
;
void
clear_attrs
();
static
const
int
kAttrsFieldNumber
=
4
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Attr
&
attrs
(
int
index
)
const
;
const
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Attr
&
attrs
(
int
index
)
const
;
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Attr
*
mutable_attrs
(
int
index
);
::
paddle_mobile
::
framework
::
proto
::
OpDesc_Attr
*
add_attrs
();
::
google
::
protobuf
::
RepeatedPtrField
<
...
...
@@ -711,7 +705,7 @@ class OpDesc
void
set_is_target
(
bool
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc)
private:
private:
void
set_has_type
();
void
clear_has_type
();
void
set_has_is_target
();
...
...
@@ -741,7 +735,7 @@ class OpProto_Var
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Var)
*/
{
public:
public:
OpProto_Var
();
virtual
~
OpProto_Var
();
...
...
@@ -805,17 +799,17 @@ class OpProto_Var
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpProto_Var
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -876,7 +870,7 @@ class OpProto_Var
void
set_dispensable
(
bool
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Var)
private:
private:
void
set_has_name
();
void
clear_has_name
();
void
set_has_comment
();
...
...
@@ -909,7 +903,7 @@ class OpProto_Attr
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Attr)
*/
{
public:
public:
OpProto_Attr
();
virtual
~
OpProto_Attr
();
...
...
@@ -973,17 +967,17 @@ class OpProto_Attr
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpProto_Attr
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1037,7 +1031,7 @@ class OpProto_Attr
void
set_generated
(
bool
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Attr)
private:
private:
void
set_has_name
();
void
clear_has_name
();
void
set_has_type
();
...
...
@@ -1067,7 +1061,7 @@ class OpProto
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto)
*/
{
public:
public:
OpProto
();
virtual
~
OpProto
();
...
...
@@ -1128,17 +1122,17 @@ class OpProto
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
OpProto
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1155,8 +1149,7 @@ class OpProto
int
inputs_size
()
const
;
void
clear_inputs
();
static
const
int
kInputsFieldNumber
=
2
;
const
::
paddle_mobile
::
framework
::
proto
::
OpProto_Var
&
inputs
(
int
index
)
const
;
const
::
paddle_mobile
::
framework
::
proto
::
OpProto_Var
&
inputs
(
int
index
)
const
;
::
paddle_mobile
::
framework
::
proto
::
OpProto_Var
*
mutable_inputs
(
int
index
);
::
paddle_mobile
::
framework
::
proto
::
OpProto_Var
*
add_inputs
();
::
google
::
protobuf
::
RepeatedPtrField
<
...
...
@@ -1187,8 +1180,7 @@ class OpProto
int
attrs_size
()
const
;
void
clear_attrs
();
static
const
int
kAttrsFieldNumber
=
4
;
const
::
paddle_mobile
::
framework
::
proto
::
OpProto_Attr
&
attrs
(
int
index
)
const
;
const
::
paddle_mobile
::
framework
::
proto
::
OpProto_Attr
&
attrs
(
int
index
)
const
;
::
paddle_mobile
::
framework
::
proto
::
OpProto_Attr
*
mutable_attrs
(
int
index
);
::
paddle_mobile
::
framework
::
proto
::
OpProto_Attr
*
add_attrs
();
::
google
::
protobuf
::
RepeatedPtrField
<
...
...
@@ -1229,7 +1221,7 @@ class OpProto
void
set_allocated_comment
(
::
std
::
string
*
comment
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto)
private:
private:
void
set_has_type
();
void
clear_has_type
();
void
set_has_comment
();
...
...
@@ -1262,7 +1254,7 @@ class VarType_TensorDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.TensorDesc)
*/
{
public:
public:
VarType_TensorDesc
();
virtual
~
VarType_TensorDesc
();
...
...
@@ -1304,9 +1296,7 @@ class VarType_TensorDesc
static
PROTOBUF_CONSTEXPR
int
const
kIndexInFileMessages
=
6
;
void
Swap
(
VarType_TensorDesc
*
other
);
friend
void
swap
(
VarType_TensorDesc
&
a
,
VarType_TensorDesc
&
b
)
{
a
.
Swap
(
&
b
);
}
friend
void
swap
(
VarType_TensorDesc
&
a
,
VarType_TensorDesc
&
b
)
{
a
.
Swap
(
&
b
);
}
// implements Message
// ----------------------------------------------
...
...
@@ -1330,17 +1320,17 @@ class VarType_TensorDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_TensorDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1358,8 +1348,7 @@ class VarType_TensorDesc
void
add_dims
(
::
google
::
protobuf
::
int64
value
);
const
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int64
>
&
dims
()
const
;
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int64
>
*
mutable_dims
();
::
google
::
protobuf
::
RepeatedField
<::
google
::
protobuf
::
int64
>
*
mutable_dims
();
// required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1;
...
...
@@ -1370,7 +1359,7 @@ class VarType_TensorDesc
void
set_data_type
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.TensorDesc)
private:
private:
void
set_has_data_type
();
void
clear_has_data_type
();
...
...
@@ -1389,7 +1378,7 @@ class VarType_LoDTensorDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorDesc)
*/
{
public:
public:
VarType_LoDTensorDesc
();
virtual
~
VarType_LoDTensorDesc
();
...
...
@@ -1439,9 +1428,7 @@ class VarType_LoDTensorDesc
// implements Message
// ----------------------------------------------
inline
VarType_LoDTensorDesc
*
New
()
const
PROTOBUF_FINAL
{
return
New
(
NULL
);
}
inline
VarType_LoDTensorDesc
*
New
()
const
PROTOBUF_FINAL
{
return
New
(
NULL
);
}
VarType_LoDTensorDesc
*
New
(
::
google
::
protobuf
::
Arena
*
arena
)
const
PROTOBUF_FINAL
;
...
...
@@ -1460,17 +1447,17 @@ class VarType_LoDTensorDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_LoDTensorDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1498,7 +1485,7 @@ class VarType_LoDTensorDesc
void
set_lod_level
(
::
google
::
protobuf
::
int32
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorDesc)
private:
private:
void
set_has_tensor
();
void
clear_has_tensor
();
void
set_has_lod_level
();
...
...
@@ -1519,7 +1506,7 @@ class VarType_LoDTensorArrayDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc)
*/
{
public:
public:
VarType_LoDTensorArrayDesc
();
virtual
~
VarType_LoDTensorArrayDesc
();
...
...
@@ -1556,8 +1543,7 @@ class VarType_LoDTensorArrayDesc
static
const
VarType_LoDTensorArrayDesc
&
default_instance
();
static
inline
const
VarType_LoDTensorArrayDesc
*
internal_default_instance
()
{
static
inline
const
VarType_LoDTensorArrayDesc
*
internal_default_instance
()
{
return
reinterpret_cast
<
const
VarType_LoDTensorArrayDesc
*>
(
&
_VarType_LoDTensorArrayDesc_default_instance_
);
}
...
...
@@ -1593,17 +1579,17 @@ class VarType_LoDTensorArrayDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_LoDTensorArrayDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1631,7 +1617,7 @@ class VarType_LoDTensorArrayDesc
void
set_lod_level
(
::
google
::
protobuf
::
int32
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc)
private:
private:
void
set_has_tensor
();
void
clear_has_tensor
();
void
set_has_lod_level
();
...
...
@@ -1652,7 +1638,7 @@ class VarType_ReaderDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ReaderDesc)
*/
{
public:
public:
VarType_ReaderDesc
();
virtual
~
VarType_ReaderDesc
();
...
...
@@ -1694,9 +1680,7 @@ class VarType_ReaderDesc
static
PROTOBUF_CONSTEXPR
int
const
kIndexInFileMessages
=
9
;
void
Swap
(
VarType_ReaderDesc
*
other
);
friend
void
swap
(
VarType_ReaderDesc
&
a
,
VarType_ReaderDesc
&
b
)
{
a
.
Swap
(
&
b
);
}
friend
void
swap
(
VarType_ReaderDesc
&
a
,
VarType_ReaderDesc
&
b
)
{
a
.
Swap
(
&
b
);
}
// implements Message
// ----------------------------------------------
...
...
@@ -1720,17 +1704,17 @@ class VarType_ReaderDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_ReaderDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1758,7 +1742,7 @@ class VarType_ReaderDesc
lod_tensor
()
const
;
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ReaderDesc)
private:
private:
::
google
::
protobuf
::
internal
::
InternalMetadataWithArenaLite
_internal_metadata_
;
::
google
::
protobuf
::
internal
::
HasBits
<
1
>
_has_bits_
;
...
...
@@ -1775,7 +1759,7 @@ class VarType_ChannelDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ChannelDesc)
*/
{
public:
public:
VarType_ChannelDesc
();
virtual
~
VarType_ChannelDesc
();
...
...
@@ -1843,17 +1827,17 @@ class VarType_ChannelDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_ChannelDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1878,7 +1862,7 @@ class VarType_ChannelDesc
void
set_data_type
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ChannelDesc)
private:
private:
void
set_has_data_type
();
void
clear_has_data_type
();
void
set_has_capacity
();
...
...
@@ -1902,7 +1886,7 @@ class VarType_Tuple
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.Tuple)
*/
{
public:
public:
VarType_Tuple
();
virtual
~
VarType_Tuple
();
...
...
@@ -1966,17 +1950,17 @@ class VarType_Tuple
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType_Tuple
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -1990,18 +1974,15 @@ class VarType_Tuple
int
element_type_size
()
const
;
void
clear_element_type
();
static
const
int
kElementTypeFieldNumber
=
1
;
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
element_type
(
int
index
)
const
;
void
set_element_type
(
int
index
,
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
element_type
(
int
index
)
const
;
void
set_element_type
(
int
index
,
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
void
add_element_type
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
void
add_element_type
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
const
::
google
::
protobuf
::
RepeatedField
<
int
>
&
element_type
()
const
;
::
google
::
protobuf
::
RepeatedField
<
int
>
*
mutable_element_type
();
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.Tuple)
private:
private:
::
google
::
protobuf
::
internal
::
InternalMetadataWithArenaLite
_internal_metadata_
;
::
google
::
protobuf
::
internal
::
HasBits
<
1
>
_has_bits_
;
...
...
@@ -2016,7 +1997,7 @@ class VarType
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType)
*/
{
public:
public:
VarType
();
virtual
~
VarType
();
...
...
@@ -2077,17 +2058,17 @@ class VarType
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarType
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -2192,8 +2173,7 @@ class VarType
bool
has_channel
()
const
;
void
clear_channel
();
static
const
int
kChannelFieldNumber
=
6
;
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
&
channel
()
const
;
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
&
channel
()
const
;
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
mutable_channel
();
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
release_channel
();
void
set_allocated_channel
(
...
...
@@ -2207,8 +2187,8 @@ class VarType
const
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
&
tuple
()
const
;
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*
mutable_tuple
();
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*
release_tuple
();
void
set_allocated_tuple
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*
tuple
);
void
set_allocated_tuple
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*
tuple
);
// required .paddle_mobile.framework.proto.VarType.Type type =
// 1;
...
...
@@ -2219,7 +2199,7 @@ class VarType
void
set_type
(
::
paddle_mobile
::
framework
::
proto
::
VarType_Type
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType)
private:
private:
void
set_has_type
();
void
clear_has_type
();
void
set_has_selected_rows
();
...
...
@@ -2241,8 +2221,7 @@ class VarType
mutable
int
_cached_size_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
*
selected_rows_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*
lod_tensor_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*
tensor_array_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*
tensor_array_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_ReaderDesc
*
reader_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
channel_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_Tuple
*
tuple_
;
...
...
@@ -2256,7 +2235,7 @@ class VarDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarDesc)
*/
{
public:
public:
VarDesc
();
virtual
~
VarDesc
();
...
...
@@ -2317,17 +2296,17 @@ class VarDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
VarDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -2368,7 +2347,7 @@ class VarDesc
void
set_persistable
(
bool
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarDesc)
private:
private:
void
set_has_name
();
void
clear_has_name
();
void
set_has_type
();
...
...
@@ -2395,7 +2374,7 @@ class BlockDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.BlockDesc)
*/
{
public:
public:
BlockDesc
();
virtual
~
BlockDesc
();
...
...
@@ -2430,8 +2409,7 @@ class BlockDesc
static
const
BlockDesc
&
default_instance
();
static
inline
const
BlockDesc
*
internal_default_instance
()
{
return
reinterpret_cast
<
const
BlockDesc
*>
(
&
_BlockDesc_default_instance_
);
return
reinterpret_cast
<
const
BlockDesc
*>
(
&
_BlockDesc_default_instance_
);
}
static
PROTOBUF_CONSTEXPR
int
const
kIndexInFileMessages
=
14
;
...
...
@@ -2459,17 +2437,17 @@ class BlockDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
BlockDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -2528,7 +2506,7 @@ class BlockDesc
void
set_forward_block_idx
(
::
google
::
protobuf
::
int32
value
);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.BlockDesc)
private:
private:
void
set_has_idx
();
void
clear_has_idx
();
void
set_has_parent_idx
();
...
...
@@ -2561,7 +2539,7 @@ class ProgramDesc
MessageLite
/* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.ProgramDesc)
*/
{
public:
public:
ProgramDesc
();
virtual
~
ProgramDesc
();
...
...
@@ -2625,17 +2603,17 @@ class ProgramDesc
void
DiscardUnknownFields
();
int
GetCachedSize
()
const
PROTOBUF_FINAL
{
return
_cached_size_
;
}
private:
private:
void
SharedCtor
();
void
SharedDtor
();
void
SetCachedSize
(
int
size
)
const
;
void
InternalSwap
(
ProgramDesc
*
other
);
private:
private:
inline
::
google
::
protobuf
::
Arena
*
GetArenaNoVirtual
()
const
{
return
NULL
;
}
inline
void
*
MaybeArenaPtr
()
const
{
return
NULL
;
}
public:
public:
::
std
::
string
GetTypeName
()
const
PROTOBUF_FINAL
;
// nested types
...
...
@@ -2659,7 +2637,7 @@ class ProgramDesc
blocks
()
const
;
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.ProgramDesc)
private:
private:
::
google
::
protobuf
::
internal
::
InternalMetadataWithArenaLite
_internal_metadata_
;
::
google
::
protobuf
::
internal
::
HasBits
<
1
>
_has_bits_
;
...
...
@@ -2697,15 +2675,14 @@ inline const ::std::string &OpDesc_Attr::name() const {
}
inline
void
OpDesc_Attr
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
#if LANG_CXX11
inline
void
OpDesc_Attr
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
...
...
@@ -2713,15 +2690,13 @@ inline void OpDesc_Attr::set_name(::std::string &&value) {
inline
void
OpDesc_Attr
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
inline
void
OpDesc_Attr
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name)
}
...
...
@@ -2954,8 +2929,7 @@ inline void OpDesc_Attr::set_strings(int index, const char *value) {
}
inline
void
OpDesc_Attr
::
set_strings
(
int
index
,
const
char
*
value
,
size_t
size
)
{
strings_
.
Mutable
(
index
)
->
assign
(
reinterpret_cast
<
const
char
*>
(
value
),
size
);
strings_
.
Mutable
(
index
)
->
assign
(
reinterpret_cast
<
const
char
*>
(
value
),
size
);
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings)
}
inline
::
std
::
string
*
OpDesc_Attr
::
add_strings
()
{
...
...
@@ -3147,8 +3121,7 @@ inline void OpDesc_Var::set_allocated_parameter(::std::string *parameter) {
clear_has_parameter
();
}
parameter_
.
SetAllocatedNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
parameter
);
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
parameter
);
// @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter)
}
...
...
@@ -3239,15 +3212,14 @@ inline const ::std::string &OpDesc::type() const {
}
inline
void
OpDesc
::
set_type
(
const
::
std
::
string
&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type)
}
#if LANG_CXX11
inline
void
OpDesc
::
set_type
(
::
std
::
string
&&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type)
}
...
...
@@ -3255,15 +3227,13 @@ inline void OpDesc::set_type(::std::string &&value) {
inline
void
OpDesc
::
set_type
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type)
}
inline
void
OpDesc
::
set_type
(
const
char
*
value
,
size_t
size
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type)
}
...
...
@@ -3421,15 +3391,14 @@ inline const ::std::string &OpProto_Var::name() const {
}
inline
void
OpProto_Var
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name)
}
#if LANG_CXX11
inline
void
OpProto_Var
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name)
}
...
...
@@ -3437,15 +3406,13 @@ inline void OpProto_Var::set_name(::std::string &&value) {
inline
void
OpProto_Var
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name)
}
inline
void
OpProto_Var
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name)
}
...
...
@@ -3629,15 +3596,14 @@ inline const ::std::string &OpProto_Attr::name() const {
}
inline
void
OpProto_Attr
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name)
}
#if LANG_CXX11
inline
void
OpProto_Attr
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name)
}
...
...
@@ -3645,15 +3611,13 @@ inline void OpProto_Attr::set_name(::std::string &&value) {
inline
void
OpProto_Attr
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name)
}
inline
void
OpProto_Attr
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name)
}
...
...
@@ -3813,15 +3777,14 @@ inline const ::std::string &OpProto::type() const {
}
inline
void
OpProto
::
set_type
(
const
::
std
::
string
&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type)
}
#if LANG_CXX11
inline
void
OpProto
::
set_type
(
::
std
::
string
&&
value
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type)
}
...
...
@@ -3829,15 +3792,13 @@ inline void OpProto::set_type(::std::string &&value) {
inline
void
OpProto
::
set_type
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type)
}
inline
void
OpProto
::
set_type
(
const
char
*
value
,
size_t
size
)
{
set_has_type
();
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
type_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type)
}
...
...
@@ -4443,8 +4404,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc *
VarType
::
mutable_selected_rows
()
{
set_has_selected_rows
();
if
(
selected_rows_
==
NULL
)
{
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
;
selected_rows_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
;
}
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows)
return
selected_rows_
;
...
...
@@ -4453,8 +4413,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc *
VarType
::
release_selected_rows
()
{
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows)
clear_has_selected_rows
();
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
*
temp
=
selected_rows_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_TensorDesc
*
temp
=
selected_rows_
;
selected_rows_
=
NULL
;
return
temp
;
}
...
...
@@ -4489,8 +4448,8 @@ VarType::lod_tensor() const {
lod_tensor_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor)
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*>
(
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
_VarType_LoDTensorDesc_default_instance_
);
}
...
...
@@ -4498,8 +4457,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *
VarType
::
mutable_lod_tensor
()
{
set_has_lod_tensor
();
if
(
lod_tensor_
==
NULL
)
{
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
;
lod_tensor_
=
new
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
;
}
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor)
return
lod_tensor_
;
...
...
@@ -4508,8 +4466,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *
VarType
::
release_lod_tensor
()
{
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor)
clear_has_lod_tensor
();
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*
temp
=
lod_tensor_
;
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorDesc
*
temp
=
lod_tensor_
;
lod_tensor_
=
NULL
;
return
temp
;
}
...
...
@@ -4544,8 +4501,7 @@ VarType::tensor_array() const {
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*
p
=
tensor_array_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array)
return
p
!=
NULL
?
*
p
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_LoDTensorArrayDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
...
...
@@ -4644,18 +4600,18 @@ inline void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; }
inline
void
VarType
::
clear_has_channel
()
{
_has_bits_
[
0
]
&=
~
0x00000010u
;
}
inline
void
VarType
::
clear_channel
()
{
if
(
channel_
!=
NULL
)
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
channel_
->::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
::
Clear
();
clear_has_channel
();
}
inline
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
&
VarType
::
channel
()
const
{
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
p
=
channel_
;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel)
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
return
p
!=
NULL
?
*
p
:
*
reinterpret_cast
<
const
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*>
(
&::
paddle_mobile
::
framework
::
proto
::
_VarType_ChannelDesc_default_instance_
);
}
inline
::
paddle_mobile
::
framework
::
proto
::
VarType_ChannelDesc
*
...
...
@@ -4759,15 +4715,14 @@ inline const ::std::string &VarDesc::name() const {
}
inline
void
VarDesc
::
set_name
(
const
::
std
::
string
&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
value
);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name)
}
#if LANG_CXX11
inline
void
VarDesc
::
set_name
(
::
std
::
string
&&
value
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
move
(
value
));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name)
}
...
...
@@ -4775,15 +4730,13 @@ inline void VarDesc::set_name(::std::string &&value) {
inline
void
VarDesc
::
set_name
(
const
char
*
value
)
{
GOOGLE_DCHECK
(
value
!=
NULL
);
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
value
));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name)
}
inline
void
VarDesc
::
set_name
(
const
char
*
value
,
size_t
size
)
{
set_has_name
();
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
name_
.
SetNoArena
(
&::
google
::
protobuf
::
internal
::
GetEmptyStringAlreadyInited
(),
::
std
::
string
(
reinterpret_cast
<
const
char
*>
(
value
),
size
));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name)
}
...
...
src/framework/lod_tensor.cc
浏览文件 @
1ad8f821
...
...
@@ -152,8 +152,7 @@ bool CheckLoD(const LoD &in, int tensor_height) {
// check: all the offsets in a level should be ascending(no same
// items
// allows).
if
(
!
std
::
is_sorted
(
level
.
begin
(),
level
.
begin
(),
[](
size_t
a
,
size_t
b
)
{
if
(
!
std
::
is_sorted
(
level
.
begin
(),
level
.
begin
(),
[](
size_t
a
,
size_t
b
)
{
if
(
a
<
b
)
return
true
;
return
false
;
...
...
@@ -188,8 +187,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height) {
// check: all the offsets in a level should be ascending(no same
// items
// allows).
if
(
!
std
::
is_sorted
(
level
.
begin
(),
level
.
begin
(),
[](
size_t
a
,
size_t
b
)
{
if
(
!
std
::
is_sorted
(
level
.
begin
(),
level
.
begin
(),
[](
size_t
a
,
size_t
b
)
{
if
(
a
<
b
)
return
true
;
return
false
;
...
...
src/framework/lod_tensor.h
浏览文件 @
1ad8f821
...
...
@@ -102,7 +102,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height = -1);
* see https://en.wikipedia.org/wiki/Level_of_details for reference.
*/
class
LoDTensor
:
public
Tensor
{
public:
public:
LoDTensor
()
:
Tensor
()
{}
explicit
LoDTensor
(
const
LoD
&
lod
)
:
lod_
(
lod
)
{}
...
...
@@ -139,7 +139,7 @@ class LoDTensor : public Tensor {
return
(
lod_
)[
level
].
size
()
-
1
;
}
private:
private:
LoD
lod_
;
};
...
...
src/framework/op_desc.cpp
浏览文件 @
1ad8f821
src/framework/op_desc.h
浏览文件 @
1ad8f821
...
...
@@ -26,7 +26,7 @@ namespace paddle_mobile {
namespace
framework
{
class
OpDesc
:
PaddleMobileObject
{
public:
public:
OpDesc
(
const
proto
::
OpDesc
&
desc
);
const
std
::
vector
<
std
::
string
>
&
Input
(
const
std
::
string
&
name
)
const
;
const
std
::
vector
<
std
::
string
>
&
Output
(
const
std
::
string
&
name
)
const
;
...
...
@@ -40,7 +40,7 @@ class OpDesc : PaddleMobileObject {
const
std
::
string
&
Type
()
{
return
desc_
.
type
();
};
private:
private:
proto
::
OpDesc
desc_
;
VariableNameMap
inputs_
;
VariableNameMap
outputs_
;
...
...
src/framework/op_info.h
浏览文件 @
1ad8f821
...
...
@@ -39,7 +39,7 @@ template <typename Dtype> class OpInfoMap;
template
<
typename
Dtype
>
static
OpInfoMap
<
Dtype
>
*
g_op_info_map
=
nullptr
;
template
<
typename
Dtype
>
class
OpInfoMap
{
public:
public:
static
OpInfoMap
&
Instance
()
{
if
(
g_op_info_map
<
Dtype
>
==
nullptr
)
{
g_op_info_map
<
Dtype
>
=
new
OpInfoMap
();
...
...
@@ -83,7 +83,7 @@ template <typename Dtype> class OpInfoMap {
return
&
map_
;
}
private:
private:
OpInfoMap
()
=
default
;
std
::
unordered_map
<
std
::
string
,
OpInfo
<
Dtype
>>
map_
;
...
...
src/framework/op_kernel_type.h
浏览文件 @
1ad8f821
...
...
@@ -27,8 +27,7 @@ struct OpKernelType {
struct
Hash
{
size_t
operator
()(
const
OpKernelType
&
key
)
const
{
int
data_type
=
static_cast
<
int
>
(
key
.
data_type_
)
<<
LEFT_SHIFT
;
int
data_layout
=
static_cast
<
int
>
(
key
.
data_layout_
)
<<
(
LEFT_SHIFT
*
2
);
int
data_layout
=
static_cast
<
int
>
(
key
.
data_layout_
)
<<
(
LEFT_SHIFT
*
2
);
std
::
hash
<
int
>
hasher
;
return
hasher
(
data_type
+
data_layout
);
...
...
src/framework/operator.cpp
浏览文件 @
1ad8f821
src/framework/operator.h
浏览文件 @
1ad8f821
...
...
@@ -49,7 +49,7 @@ static std::unordered_map<
{
"fetch"
,
{{
"X"
},
{
"Out"
}}}};
template
<
typename
Dtype
>
class
OperatorBase
:
PaddleMobileObject
{
public:
public:
OperatorBase
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
AttributeMap
&
attrs
,
std
::
shared_ptr
<
Scope
>
scope
);
...
...
@@ -66,30 +66,30 @@ template <typename Dtype> class OperatorBase : PaddleMobileObject {
}
}
protected:
protected:
std
::
shared_ptr
<
Scope
>
scope_
;
std
::
string
type_
;
VariableNameMap
inputs_
;
VariableNameMap
outputs_
;
AttributeMap
attrs_
;
private:
private:
void
CheckAllInputOutputSet
()
const
;
};
template
<
typename
Dtype
>
class
OperatorWithKernel
:
public
OperatorBase
<
Dtype
>
{
public:
public:
OperatorWithKernel
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
output
s
,
const
AttributeMap
&
attrs
,
std
::
shared_ptr
<
Scope
>
scope
)
const
VariableNameMap
&
outputs
,
const
AttributeMap
&
attr
s
,
std
::
shared_ptr
<
Scope
>
scope
)
:
OperatorBase
<
Dtype
>
(
type
,
inputs
,
outputs
,
attrs
,
scope
)
{}
virtual
void
InferShape
()
const
=
0
;
virtual
void
Run
()
const
=
0
;
};
template
<
typename
Dtype
,
typename
P
>
class
OpKernelBase
:
PaddleMobileObject
{
public:
public:
virtual
void
Compute
(
const
P
&
para
)
const
=
0
;
virtual
~
OpKernelBase
()
=
default
;
...
...
src/framework/paddle_mobile_object.h
浏览文件 @
1ad8f821
...
...
@@ -24,13 +24,13 @@ SOFTWARE.
namespace
paddle_mobile
{
class
PaddleMobileObject
{
public:
public:
virtual
std
::
string
ToString
()
{
char
address
[
128
]
=
{
0
};
sprintf
(
address
,
"%p"
,
this
);
return
std
::
string
(
address
);
}
private:
private:
};
}
// namespace paddle_mobile
src/framework/program-optimize/node.cpp
浏览文件 @
1ad8f821
src/framework/program-optimize/node.h
浏览文件 @
1ad8f821
...
...
@@ -29,7 +29,7 @@ namespace paddle_mobile {
namespace
framework
{
class
Node
:
PaddleMobileObject
{
public:
public:
Node
(
const
std
::
string
&
type
)
:
type_
(
type
)
{}
Node
(
std
::
shared_ptr
<
OpDesc
>
op_desc
)
:
op_desc_
(
op_desc
),
type_
(
op_desc
->
Type
()){};
...
...
@@ -39,7 +39,7 @@ class Node : PaddleMobileObject {
Node
&
To
(
int
index
);
uint
depth
(
uint
begin
=
0
);
private:
private:
std
::
shared_ptr
<
OpDesc
>
op_desc_
;
std
::
string
ToString
(
std
::
string
blank
,
const
Node
*
node
)
const
;
std
::
vector
<
std
::
shared_ptr
<
Node
>>
outputs_
;
...
...
src/framework/program-optimize/program_optimize.cpp
浏览文件 @
1ad8f821
...
...
@@ -35,8 +35,7 @@ ProgramOptimize::FushionOptimize(std::shared_ptr<ProgramDesc> ori_des) {
auto
op
=
block
->
Ops
()[
j
];
auto
op_type
=
op
->
Type
();
// DLOG << "op type: " << op_type << " index: " << j;
if
(
op_input_output_key
.
find
(
op
->
Type
())
==
op_input_output_key
.
end
())
{
if
(
op_input_output_key
.
find
(
op
->
Type
())
==
op_input_output_key
.
end
())
{
return
NULL
;
}
...
...
src/framework/program-optimize/program_optimize.h
浏览文件 @
1ad8f821
...
...
@@ -26,13 +26,13 @@ namespace paddle_mobile {
namespace
framework
{
class
ProgramOptimize
{
public:
public:
ProgramOptimize
()
{}
std
::
shared_ptr
<
ProgramDesc
>
Optimize
();
std
::
shared_ptr
<
ProgramDesc
>
FushionOptimize
(
std
::
shared_ptr
<
ProgramDesc
>
ori_des
);
private:
private:
// std::shared_ptr<ProgramDesc> ori_desc_;
std
::
vector
<
std
::
unordered_map
<
std
::
string
,
std
::
shared_ptr
<
Node
>>>
outputs_nodes_
;
...
...
src/framework/program.h
浏览文件 @
1ad8f821
...
...
@@ -28,12 +28,12 @@ namespace framework {
template
<
typename
Dtype
,
Precision
P
=
Precision
::
FP32
>
class
Program
:
PaddleMobileObject
{
public:
public:
std
::
shared_ptr
<
ProgramDesc
>
originProgram
;
std
::
shared_ptr
<
ProgramDesc
>
optimizeProgram
;
std
::
shared_ptr
<
Scope
>
scope
;
private:
private:
};
}
// namespace framework
...
...
src/framework/program_desc.cpp
浏览文件 @
1ad8f821
src/framework/program_desc.h
浏览文件 @
1ad8f821
...
...
@@ -28,12 +28,12 @@ namespace paddle_mobile {
namespace
framework
{
class
ProgramDesc
:
PaddleMobileObject
{
public:
public:
ProgramDesc
(
const
proto
::
ProgramDesc
&
desc
);
std
::
shared_ptr
<
BlockDesc
>
Block
(
size_t
idx
);
const
std
::
vector
<
std
::
shared_ptr
<
BlockDesc
>>
&
Blocks
()
{
return
blocks_
;
};
private:
private:
std
::
vector
<
std
::
shared_ptr
<
BlockDesc
>>
blocks_
;
proto
::
ProgramDesc
desc_
;
};
...
...
src/framework/scope.cc
浏览文件 @
1ad8f821
src/framework/scope.h
浏览文件 @
1ad8f821
...
...
@@ -26,7 +26,7 @@ SOFTWARE.
namespace
paddle_mobile
{
namespace
framework
{
class
Scope
{
public:
public:
Scope
()
{}
~
Scope
()
{}
...
...
@@ -67,7 +67,7 @@ class Scope {
Variable
*
FindVarLocally
(
const
std
::
string
&
name
)
const
;
private:
private:
// Call Scope::NewScope for a sub-scope.
explicit
Scope
(
Scope
const
*
parent
)
:
parent_
(
parent
)
{}
...
...
src/framework/selected_rows.h
浏览文件 @
1ad8f821
...
...
@@ -27,7 +27,7 @@ namespace paddle_mobile {
namespace
framework
{
class
SelectedRows
{
public:
public:
SelectedRows
(
const
std
::
vector
<
int64_t
>
&
rows
,
const
int64_t
&
height
)
:
rows_
(
rows
),
height_
(
height
)
{
value_
.
reset
(
new
Tensor
());
...
...
@@ -67,7 +67,7 @@ class SelectedRows {
return
make_ddim
(
dims
);
}
private:
private:
// Notice: rows can be duplicate. We can have {0, 4, 7, 0, 5, 7, 9}
// here.
// SelectedRows are simply concated when adding together. Until a
...
...
src/framework/tensor.h
浏览文件 @
1ad8f821
...
...
@@ -56,8 +56,7 @@ struct SizeOfTypeFunctor<HEAD, TAIL...> {
};
static
inline
size_t
SizeOfType
(
std
::
type_index
type
)
{
SizeOfTypeFunctor
<
int
,
float
,
double
,
int16_t
,
int64_t
,
bool
,
size_t
>
functor
;
SizeOfTypeFunctor
<
int
,
float
,
double
,
int16_t
,
int64_t
,
bool
,
size_t
>
functor
;
size_t
size
=
functor
(
type
);
// PADDLE_ENFORCE(size != 0UL, "Cannot get size of type %s",
// type.name());
...
...
@@ -67,7 +66,7 @@ static inline size_t SizeOfType(std::type_index type) {
class
LoDTensor
;
class
Tensor
{
public:
public:
Tensor
()
:
offset_
(
0
)
{}
/*! Return a pointer to mutable memory block. */
...
...
@@ -78,8 +77,8 @@ class Tensor {
// typeid(T).hash_code(),
// "Tensor holds the wrong type, it holds %s",
// this->holder_->type().name());
return
reinterpret_cast
<
T
*>
(
reinterpret_cast
<
uintptr_t
>
(
holder_
->
ptr
())
+
offset_
);
return
reinterpret_cast
<
T
*>
(
reinterpret_cast
<
uintptr_t
>
(
holder_
->
ptr
())
+
offset_
);
}
/*! Return a pointer to constant memory block. */
...
...
@@ -236,7 +235,7 @@ class Tensor {
inline
void
set_layout
(
const
DataLayout
layout
)
{
layout_
=
layout
;
}
private:
private:
/**
* @note Placeholder hides type T, so it doesn't appear as a
* template
...
...
src/framework/tensor_util.cc
浏览文件 @
1ad8f821
...
...
@@ -189,8 +189,7 @@ void TensorFromStream(std::istream &is, framework::Tensor *tensor) {
{
// read tensor
std
::
vector
<
int64_t
>
dims
;
dims
.
reserve
(
static_cast
<
size_t
>
(
desc
.
dims
().
size
()));
std
::
copy
(
desc
.
dims
().
begin
(),
desc
.
dims
().
end
(),
std
::
back_inserter
(
dims
));
std
::
copy
(
desc
.
dims
().
begin
(),
desc
.
dims
().
end
(),
std
::
back_inserter
(
dims
));
tensor
->
Resize
(
framework
::
make_ddim
(
dims
));
void
*
buf
;
...
...
src/framework/tensor_util.h
浏览文件 @
1ad8f821
src/framework/var_desc.h
浏览文件 @
1ad8f821
...
...
@@ -25,7 +25,7 @@ namespace paddle_mobile {
namespace
framework
{
class
VarDesc
{
public:
public:
VarDesc
(
const
proto
::
VarDesc
&
desc
);
std
::
string
Name
()
const
{
return
desc_
.
name
();
}
...
...
@@ -80,7 +80,7 @@ class VarDesc {
return
this
->
RepeatedToVector
(
tensor_desc
().
dims
());
}
private:
private:
proto
::
VarDesc
desc_
;
};
...
...
src/framework/var_type.h
浏览文件 @
1ad8f821
src/framework/variable.h
浏览文件 @
1ad8f821
...
...
@@ -28,7 +28,7 @@ SOFTWARE.
namespace
paddle_mobile
{
namespace
framework
{
class
Variable
:
public
PaddleMobileObject
{
public:
public:
template
<
typename
T
>
const
T
*
Get
()
const
{
return
static_cast
<
const
T
*>
(
holder_
->
Ptr
());
}
...
...
@@ -67,7 +67,7 @@ class Variable : public PaddleMobileObject {
void
SetName
(
const
std
::
string
*
name
)
{
name_
=
name
;
}
private:
private:
struct
Placeholder
{
Placeholder
()
=
default
;
virtual
~
Placeholder
()
=
default
;
...
...
src/io.cpp
浏览文件 @
1ad8f821
...
...
@@ -174,10 +174,8 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
auto
var
=
scope
->
Var
(
var_desc
->
Name
());
if
(
var_desc
->
GetType
()
==
framework
::
proto
::
VarType
::
LOD_TENSOR
)
{
if
(
var_desc
->
Persistable
()
&&
var_desc
->
GetType
()
!=
framework
::
proto
::
VarType
::
FEED_MINIBATCH
&&
var_desc
->
GetType
()
!=
framework
::
proto
::
VarType
::
FETCH_LIST
)
{
var_desc
->
GetType
()
!=
framework
::
proto
::
VarType
::
FEED_MINIBATCH
&&
var_desc
->
GetType
()
!=
framework
::
proto
::
VarType
::
FETCH_LIST
)
{
framework
::
LoDTensor
*
tensor
=
var
->
GetMutable
<
framework
::
LoDTensor
>
();
// to load
...
...
@@ -268,8 +266,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
}
if
(
var
.
persistable
()
&&
var
.
type
().
type
()
!=
framework
::
proto
::
VarType
::
FEED_MINIBATCH
&&
var
.
type
().
type
()
!=
framework
::
proto
::
VarType
::
FEED_MINIBATCH
&&
var
.
type
().
type
()
!=
framework
::
proto
::
VarType
::
FETCH_LIST
)
{
// std::cout << " to load " << var.name() <<
// std::endl;
...
...
@@ -289,8 +286,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
// 2 Lod information
uint64_t
lod_level
;
is
.
read
(
reinterpret_cast
<
char
*>
(
&
lod_level
),
sizeof
(
lod_level
));
is
.
read
(
reinterpret_cast
<
char
*>
(
&
lod_level
),
sizeof
(
lod_level
));
// std::cout << " load level: " << lod_level <<
// std::endl;
// std::cout << " lod info: " << std::endl;
...
...
src/io.h
浏览文件 @
1ad8f821
...
...
@@ -29,10 +29,10 @@ namespace paddle_mobile {
template
<
typename
Dtype
,
Precision
P
=
Precision
::
FP32
>
class
Loader
:
PaddleMobileObject
{
public:
public:
const
framework
::
Program
<
Dtype
,
P
>
Load
(
const
std
::
string
&
dirname
);
private:
private:
void
LoadVar
(
framework
::
LoDTensor
*
tensor
,
const
std
::
string
&
file_path
);
};
...
...
src/memory/t_malloc.cc
浏览文件 @
1ad8f821
src/memory/t_malloc.h
浏览文件 @
1ad8f821
...
...
@@ -40,7 +40,7 @@ void Free(void *ptr);
template
<
typename
T
>
class
PODDeleter
{
static_assert
(
std
::
is_pod
<
T
>::
value
,
"T must be POD"
);
public:
public:
explicit
PODDeleter
(){};
void
operator
()(
T
*
ptr
)
{
Free
(
static_cast
<
void
*>
(
ptr
));
}
...
...
@@ -55,7 +55,7 @@ template <typename T> class PODDeleter {
* reinterpret_cast
*/
template
<
typename
T
>
class
PlainDeleter
{
public:
public:
explicit
PlainDeleter
(){};
void
operator
()(
T
*
ptr
)
{
Free
(
reinterpret_cast
<
void
*>
(
ptr
));
}
...
...
src/operators/batchnorm_op.cpp
浏览文件 @
1ad8f821
src/operators/batchnorm_op.h
浏览文件 @
1ad8f821
...
...
@@ -27,13 +27,13 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
BatchNormOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
BatchNormOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
void
Run
()
const
{
...
...
@@ -44,7 +44,7 @@ class BatchNormOp : public framework::OperatorWithKernel<DeviceType> {
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
protected:
protected:
BatchNormParam
param_
;
};
...
...
src/operators/concat_op.cpp
浏览文件 @
1ad8f821
src/operators/concat_op.h
浏览文件 @
1ad8f821
...
...
@@ -26,13 +26,12 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ConcatOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
ConcatOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
void
Run
()
const
{
...
...
@@ -43,7 +42,7 @@ class ConcatOp : public framework::OperatorWithKernel<DeviceType> {
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
protected:
protected:
ConcatParam
param_
;
};
...
...
src/operators/conv_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -60,9 +60,9 @@ void ConvOp<Dtype, T>::InferShape() const {
std
::
vector
<
int64_t
>
output_shape
({
in_dims
[
0
],
filter_dims
[
0
]});
for
(
size_t
i
=
0
;
i
<
strides
.
size
();
++
i
)
{
output_shape
.
push_back
(
ConvOutputSize
(
in
_dims
[
i
+
2
],
filter_dims
[
i
+
2
],
dilation
s
[
i
],
paddings
[
i
],
strides
[
i
]));
output_shape
.
push_back
(
ConvOutputSize
(
in_dims
[
i
+
2
],
filter
_dims
[
i
+
2
],
dilations
[
i
],
padding
s
[
i
],
strides
[
i
]));
}
framework
::
DDim
ddim
=
framework
::
make_ddim
(
output_shape
);
...
...
src/operators/conv_op.h
浏览文件 @
1ad8f821
...
...
@@ -28,12 +28,12 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ConvOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
ConvOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
...
...
@@ -45,7 +45,7 @@ class ConvOp : public framework::OperatorWithKernel<DeviceType> {
this
->
ClearVariables
({
"Filter"
,
"Input"
});
}
private:
private:
ConvParam
param_
;
};
...
...
src/operators/elementwise_add_op.cpp
浏览文件 @
1ad8f821
src/operators/elementwise_add_op.h
浏览文件 @
1ad8f821
...
...
@@ -27,13 +27,13 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ElementwiseAddOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
ElementwiseAddOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
void
Run
()
const
{
...
...
@@ -44,7 +44,7 @@ class ElementwiseAddOp : public framework::OperatorWithKernel<DeviceType> {
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
protected:
protected:
ElementwiseAddParam
param_
;
};
}
// namespace operators
...
...
src/operators/kernel/arm/batchnorm_kernel.cpp
浏览文件 @
1ad8f821
...
...
@@ -67,15 +67,14 @@ void BatchNormKernel<CPU, float>::Compute(const BatchNormParam ¶m) const {
/// (x * inv_var * scale) + (bias - est_mean * inv_var * scale)
for
(
int
i
=
0
;
i
<
C
;
i
++
)
{
new_scale_ptr
[
i
]
=
inv_std_ptr
[
i
]
*
scale_ptr
[
i
];
new_bias_ptr
[
i
]
=
bias_ptr
[
i
]
-
mean_ptr
[
i
]
*
inv_std_ptr
[
i
]
*
scale_ptr
[
i
];
new_bias_ptr
[
i
]
=
bias_ptr
[
i
]
-
mean_ptr
[
i
]
*
inv_std_ptr
[
i
]
*
scale_ptr
[
i
];
{
for
(
int
n
=
0
;
n
<
N
;
n
++
)
{
for
(
int
h
=
0
;
h
<
H
;
h
++
)
{
for
(
int
w
=
0
;
w
<
W
;
w
++
)
{
int
index
=
n
*
stride0
+
i
*
stride1
+
h
*
stride2
+
w
;
out_ptr
[
index
]
=
input_x_ptr
[
index
]
*
new_scale_ptr
[
i
]
+
new_bias_ptr
[
i
];
out_ptr
[
index
]
=
input_x_ptr
[
index
]
*
new_scale_ptr
[
i
]
+
new_bias_ptr
[
i
];
}
}
}
...
...
src/operators/kernel/arm/concat_kernel.cpp
浏览文件 @
1ad8f821
...
...
@@ -19,7 +19,7 @@ limitations under the License. */
namespace
paddle_mobile
{
namespace
operators
{
template
<
typename
T
>
class
ConcatFunctor
{
public:
public:
void
operator
()(
const
std
::
vector
<
framework
::
Tensor
>
&
input
,
const
int
axis
,
framework
::
Tensor
*
output
)
{
size_t
num
=
input
.
size
();
...
...
@@ -80,8 +80,7 @@ void StridedNumelCopyWithAxis(int64_t axis, T *dst,
}
for
(
int64_t
i
=
0
;
i
<
before
;
++
i
)
{
memory
::
Copy
(
dst
+
i
*
dst_after
,
src
+
i
*
src_after
,
sizeof
(
T
)
*
size
);
memory
::
Copy
(
dst
+
i
*
dst_after
,
src
+
i
*
src_after
,
sizeof
(
T
)
*
size
);
}
}
...
...
@@ -98,9 +97,9 @@ void ConcatKernel<CPU, float>::Compute(const ConcatParam ¶m) const {
for
(
auto
*
in
:
inputs
)
{
auto
in_stride
=
framework
::
stride_numel
(
in
->
dims
());
auto
out_stride
=
framework
::
stride_numel
(
out
->
dims
());
StridedNumelCopyWithAxis
<
float
>
(
axis
,
out
->
data
<
float
>
()
+
output_offset
,
out
_stride
,
in
->
data
<
float
>
(),
in_stride
,
in_stride
[
axis
]);
StridedNumelCopyWithAxis
<
float
>
(
axis
,
out
->
data
<
float
>
()
+
output_offset
,
out_stride
,
in
->
data
<
float
>
(),
in
_stride
,
in_stride
[
axis
]);
output_offset
+=
in_stride
[
axis
];
}
}
else
{
...
...
src/operators/kernel/arm/conv_kernel.cpp
浏览文件 @
1ad8f821
...
...
@@ -138,12 +138,10 @@ template <> void ConvKernel<CPU, float>::Compute(const ConvParam ¶m) const {
}
// gemm
Tensor
out_slice
=
out_batch
.
Slice
(
g
*
out_step
,
(
g
+
1
)
*
out_step
);
Tensor
filter_slice
=
filter
.
Slice
(
g
*
out_step
,
(
g
+
1
)
*
out_step
);
math
::
matmul
<
float
>
(
filter_slice
,
false
,
col_matrix
,
false
,
float
(
1.0
),
&
out_slice
,
float
(
0.0
));
Tensor
out_slice
=
out_batch
.
Slice
(
g
*
out_step
,
(
g
+
1
)
*
out_step
);
Tensor
filter_slice
=
filter
.
Slice
(
g
*
out_step
,
(
g
+
1
)
*
out_step
);
math
::
matmul
<
float
>
(
filter_slice
,
false
,
col_matrix
,
false
,
float
(
1.0
),
&
out_slice
,
float
(
0.0
));
}
}
}
...
...
src/operators/kernel/arm/elementwise_add_kernel.cpp
浏览文件 @
1ad8f821
src/operators/kernel/arm/lrn_kernel.cpp
浏览文件 @
1ad8f821
src/operators/kernel/arm/mul_kernel.cpp
浏览文件 @
1ad8f821
src/operators/kernel/arm/pool_kernel.cpp
浏览文件 @
1ad8f821
src/operators/kernel/batchnorm_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -28,7 +28,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
BatchNormKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
BatchNormParam
>
{
public:
public:
void
Compute
(
const
BatchNormParam
&
param
)
const
;
};
...
...
src/operators/kernel/concat_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -26,7 +26,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ConcatKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
ConcatParam
>
{
public:
public:
void
Compute
(
const
ConcatParam
&
param
)
const
;
};
...
...
src/operators/kernel/conv_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -31,7 +31,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ConvKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
ConvParam
>
{
public:
public:
void
Compute
(
const
ConvParam
&
param
)
const
;
};
}
// namespace operators
...
...
src/operators/kernel/elementwise_add_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -29,7 +29,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
ElementwiseAddKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
ElementwiseAddParam
>
{
public:
public:
void
Compute
(
const
ElementwiseAddParam
&
param
)
const
;
};
}
// namespace operators
...
...
src/operators/kernel/lrn_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -26,8 +26,8 @@ namespace operators {
using
namespace
framework
;
template
<
typename
T
>
struct
LRNFunctor
{
void
operator
()(
const
framework
::
Tensor
&
input
,
framework
::
Tensor
*
out
,
int
N
,
int
C
,
int
H
,
int
W
,
int
n
,
T
k
,
T
alpha
,
T
beta
)
{
void
operator
()(
const
framework
::
Tensor
&
input
,
framework
::
Tensor
*
out
,
int
N
,
int
C
,
int
H
,
int
W
,
int
n
,
T
k
,
T
alpha
,
T
beta
)
{
auto
input_ptr
=
input
.
data
<
T
>
();
const
int
start
=
-
(
n
-
1
)
/
2
;
const
int
end
=
start
+
n
;
...
...
@@ -47,14 +47,11 @@ template <typename T> struct LRNFunctor {
if
(
channel
>=
0
&&
channel
<
C
)
{
for
(
int
c
=
0
;
c
<
H
;
c
++
)
{
for
(
int
d
=
0
;
d
<
W
;
d
++
)
{
int
u
=
a
*
stride0
+
b
*
stride1
+
c
*
stride2
+
d
;
int
u
=
a
*
stride0
+
b
*
stride1
+
c
*
stride2
+
d
;
int
i
=
a
*
stride0
+
channel
*
stride1
+
c
*
stride2
+
d
;
int
i
=
a
*
stride0
+
channel
*
stride1
+
c
*
stride2
+
d
;
sqr_buffer_ptr
[
u
]
+=
alpha
*
input_ptr
[
i
]
*
input_ptr
[
i
];
sqr_buffer_ptr
[
u
]
+=
alpha
*
input_ptr
[
i
]
*
input_ptr
[
i
];
}
}
}
...
...
@@ -70,7 +67,7 @@ template <typename T> struct LRNFunctor {
template
<
typename
DeviceType
,
typename
T
>
class
LrnKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
LrnParam
>
{
public:
public:
void
Compute
(
const
LrnParam
&
param
)
const
;
};
}
// namespace operators
...
...
src/operators/kernel/mul_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -28,7 +28,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
MulKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
MulParam
>
{
public:
public:
void
Compute
(
const
MulParam
&
param
)
const
;
};
}
// namespace operators
...
...
src/operators/kernel/pool_kernel.h
浏览文件 @
1ad8f821
...
...
@@ -28,7 +28,7 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
PoolKernel
:
public
framework
::
OpKernelBase
<
DeviceType
,
PoolParam
>
{
public:
public:
void
Compute
(
const
PoolParam
&
param
)
const
;
};
}
// namespace operators
...
...
src/operators/lrn_op.cpp
浏览文件 @
1ad8f821
src/operators/lrn_op.h
浏览文件 @
1ad8f821
...
...
@@ -27,12 +27,12 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
LrnOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
LrnOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
void
Run
()
const
{
...
...
@@ -43,7 +43,7 @@ class LrnOp : public framework::OperatorWithKernel<DeviceType> {
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
protected:
protected:
LrnParam
param_
;
};
...
...
src/operators/math/elementwise_op_function.h
浏览文件 @
1ad8f821
...
...
@@ -69,7 +69,7 @@ inline void trim_trailing_singular_dims(framework::DDim *dims) {
}
template
<
typename
T
>
class
RowwiseTransformIterator
{
public:
public:
RowwiseTransformIterator
(
const
T
*
ptr
,
int
n
)
:
ptr_
(
ptr
),
i_
(
0
),
n_
(
n
)
{}
RowwiseTransformIterator
<
T
>
&
operator
++
()
{
...
...
@@ -90,7 +90,7 @@ template <typename T> class RowwiseTransformIterator {
const
T
&
operator
*
()
{
return
ptr_
[
i_
];
}
private:
private:
const
T
*
ptr_
;
int
i_
;
int64_t
n_
;
...
...
@@ -101,7 +101,7 @@ template <typename T> class RowwiseTransformIterator {
/// in (4,20,2) is 2 ,
/// (20,1) move 1 stride , to fill(add) 2 element with the same number.
template
<
typename
T
>
class
MidWiseTransformIterator
{
public:
public:
MidWiseTransformIterator
(
const
T
*
ptr
,
int
n
,
int
post
)
:
ptr_
(
ptr
),
i_
(
0
),
j_
(
0
),
n_
(
n
),
post_
(
post
)
{}
...
...
@@ -127,7 +127,7 @@ template <typename T> class MidWiseTransformIterator {
const
T
&
operator
*
()
{
return
ptr_
[
i_
];
}
private:
private:
const
T
*
ptr_
;
int64_t
i_
;
int64_t
j_
;
...
...
@@ -137,7 +137,7 @@ template <typename T> class MidWiseTransformIterator {
template
<
typename
Functor
,
typename
T
,
typename
OutType
=
T
>
class
TransformFunctor
{
public:
public:
TransformFunctor
(
const
framework
::
Tensor
*
x
,
const
framework
::
Tensor
*
y
,
framework
::
Tensor
*
z
,
Functor
func
)
:
x_
(
x
->
data
<
T
>
()),
y_
(
y
->
data
<
T
>
()),
z_
(
z
->
mutable_data
<
OutType
>
()),
...
...
@@ -156,11 +156,10 @@ class TransformFunctor {
inline
void
RunMidWise
(
int
n
,
int
pre
,
int
post
)
const
{
math
::
Transform
trans
;
trans
(
x_
,
x_
+
nx_
,
MidWiseTransformIterator
<
T
>
(
y_
,
n
,
post
),
z_
,
func_
);
trans
(
x_
,
x_
+
nx_
,
MidWiseTransformIterator
<
T
>
(
y_
,
n
,
post
),
z_
,
func_
);
}
private:
private:
const
T
*
x_
;
const
T
*
y_
;
OutType
*
z_
;
...
...
src/operators/math/im2col.cc
浏览文件 @
1ad8f821
...
...
@@ -26,9 +26,8 @@ namespace math {
* output_width]
*/
template
<
class
T
>
class
Im2ColFunctor
<
ColFormat
::
kCFO
,
CPU
,
T
>
{
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
const
std
::
vector
<
int
>
&
padding
,
framework
::
Tensor
*
col
)
{
// PADDLE_ENFORCE(im.dims().size() == 3);
...
...
@@ -72,17 +71,13 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> {
int
h_offset
=
(
c
/
filter_width
)
%
filter_height
;
int
c_im
=
c
/
(
filter_width
*
filter_height
);
for
(
int
h
=
0
;
h
<
col_height
;
++
h
)
{
int
im_row_idx
=
h
*
stride
[
0
]
-
padding
[
0
]
+
h_offset
*
dilation
[
0
];
int
im_row_idx
=
h
*
stride
[
0
]
-
padding
[
0
]
+
h_offset
*
dilation
[
0
];
for
(
int
w
=
0
;
w
<
col_width
;
++
w
)
{
int
im_col_idx
=
w
*
stride
[
1
]
-
padding
[
1
]
+
w_offset
*
dilation
[
1
];
int
im_col_idx
=
w
*
stride
[
1
]
-
padding
[
1
]
+
w_offset
*
dilation
[
1
];
int
col_idx
=
(
c
*
col_height
+
h
)
*
col_width
+
w
;
int
im_idx
=
(
im_row_idx
+
c_im
*
im_height
)
*
im_width
+
im_col_idx
;
int
im_idx
=
(
im_row_idx
+
c_im
*
im_height
)
*
im_width
+
im_col_idx
;
col_data
[
col_idx
]
=
(
im_row_idx
<
0
||
im_row_idx
>=
im_height
||
col_data
[
col_idx
]
=
(
im_row_idx
<
0
||
im_row_idx
>=
im_height
||
im_col_idx
<
0
||
im_col_idx
>=
im_width
)
?
static_cast
<
T
>
(
0
)
:
im_data
[
im_idx
];
...
...
@@ -99,7 +94,7 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> {
* output_width]
*/
template
<
class
T
>
class
Col2ImFunctor
<
ColFormat
::
kCFO
,
CPU
,
T
>
{
public:
public:
void
operator
()(
const
framework
::
Tensor
&
col
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
...
...
@@ -145,15 +140,12 @@ template <class T> class Col2ImFunctor<ColFormat::kCFO, CPU, T> {
int
h_offset
=
(
c
/
filter_width
)
%
filter_height
;
int
c_im
=
c
/
(
filter_width
*
filter_height
);
for
(
int
h
=
0
;
h
<
col_height
;
++
h
)
{
int
im_row_idx
=
h
*
stride
[
0
]
-
padding
[
0
]
+
h_offset
*
dilation
[
0
];
int
im_row_idx
=
h
*
stride
[
0
]
-
padding
[
0
]
+
h_offset
*
dilation
[
0
];
for
(
int
w
=
0
;
w
<
col_width
;
++
w
)
{
int
im_col_idx
=
w
*
stride
[
1
]
-
padding
[
1
]
+
w_offset
*
dilation
[
1
];
int
im_col_idx
=
w
*
stride
[
1
]
-
padding
[
1
]
+
w_offset
*
dilation
[
1
];
if
((
im_row_idx
)
>=
0
&&
(
im_row_idx
)
<
im_height
&&
(
im_col_idx
)
>=
0
&&
(
im_col_idx
)
<
im_width
)
{
im_data
[(
im_row_idx
+
c_im
*
im_height
)
*
im_width
+
im_col_idx
]
+=
im_data
[(
im_row_idx
+
c_im
*
im_height
)
*
im_width
+
im_col_idx
]
+=
col_data
[(
c
*
col_height
+
h
)
*
col_width
+
w
];
}
}
...
...
@@ -174,9 +166,8 @@ template class Col2ImFunctor<ColFormat::kCFO, CPU, double>;
* filter_width]
*/
template
<
class
T
>
class
Im2ColFunctor
<
ColFormat
::
kOCF
,
CPU
,
T
>
{
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
const
std
::
vector
<
int
>
&
padding
,
framework
::
Tensor
*
col
)
{
// PADDLE_ENFORCE(im.dims().size() == 3);
...
...
@@ -210,29 +201,25 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> {
for
(
int
channel
=
0
;
channel
<
im_channels
;
++
channel
)
{
for
(
int
filter_row_idx
=
0
;
filter_row_idx
<
filter_height
;
++
filter_row_idx
)
{
int
im_row_offset
=
col_row_idx
*
stride
[
0
]
+
filter_row_idx
-
padding
[
0
];
for
(
int
filter_col_idx
=
0
;
filter_col_idx
<
filter_width
;
++
filter_col_idx
)
{
int
im_col_offset
=
col_col_idx
*
stride
[
1
]
+
filter_col_idx
-
padding
[
1
];
int
im_row_offset
=
col_row_idx
*
stride
[
0
]
+
filter_row_idx
-
padding
[
0
];
for
(
int
filter_col_idx
=
0
;
filter_col_idx
<
filter_width
;
++
filter_col_idx
)
{
int
im_col_offset
=
col_col_idx
*
stride
[
1
]
+
filter_col_idx
-
padding
[
1
];
int
col_offset
=
((((
col_row_idx
)
*
col_width
+
col_col_idx
)
*
im_channels
+
((((
col_row_idx
)
*
col_width
+
col_col_idx
)
*
im_channels
+
channel
)
*
filter_height
+
filter_row_idx
)
*
filter_width
+
filter_col_idx
;
int
im_offset
=
(
channel
*
im_height
+
im_row_offset
)
*
im_width
+
int
im_offset
=
(
channel
*
im_height
+
im_row_offset
)
*
im_width
+
im_col_offset
;
col_data
[
col_offset
]
=
(
im_row_offset
<
0
||
im_row_offset
>=
im_height
||
(
im_row_offset
<
0
||
im_row_offset
>=
im_height
||
im_col_offset
<
0
||
im_col_offset
>=
im_width
)
?
static_cast
<
T
>
(
0
)
:
im_data
[
im_offset
];
...
...
@@ -251,7 +238,7 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> {
* filter_width]
*/
template
<
class
T
>
class
Col2ImFunctor
<
ColFormat
::
kOCF
,
CPU
,
T
>
{
public:
public:
void
operator
()(
const
framework
::
Tensor
&
col
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
...
...
@@ -287,29 +274,25 @@ template <class T> class Col2ImFunctor<ColFormat::kOCF, CPU, T> {
for
(
int
channel
=
0
;
channel
<
im_channels
;
++
channel
)
{
for
(
int
filter_row_idx
=
0
;
filter_row_idx
<
filter_height
;
++
filter_row_idx
)
{
int
im_row_offset
=
col_row_idx
*
stride
[
0
]
+
filter_row_idx
-
padding
[
0
];
for
(
int
filter_col_idx
=
0
;
filter_col_idx
<
filter_width
;
++
filter_col_idx
)
{
int
im_col_offset
=
col_col_idx
*
stride
[
1
]
+
filter_col_idx
-
padding
[
1
];
int
im_row_offset
=
col_row_idx
*
stride
[
0
]
+
filter_row_idx
-
padding
[
0
];
for
(
int
filter_col_idx
=
0
;
filter_col_idx
<
filter_width
;
++
filter_col_idx
)
{
int
im_col_offset
=
col_col_idx
*
stride
[
1
]
+
filter_col_idx
-
padding
[
1
];
int
col_offset
=
(((
col_row_idx
*
col_width
+
col_col_idx
)
*
im_channels
+
(((
col_row_idx
*
col_width
+
col_col_idx
)
*
im_channels
+
channel
)
*
filter_height
+
filter_row_idx
)
*
filter_width
+
filter_col_idx
;
if
(
im_row_offset
>=
0
&&
im_row_offset
<
im_height
&&
im_col_offset
>=
0
&&
im_col_offset
<
im_width
)
{
if
(
im_row_offset
>=
0
&&
im_row_offset
<
im_height
&&
im_col_offset
>=
0
&&
im_col_offset
<
im_width
)
{
int
im_offset
=
(
channel
*
im_height
+
im_row_offset
)
*
im_width
+
(
channel
*
im_height
+
im_row_offset
)
*
im_width
+
im_col_offset
;
im_data
[
im_offset
]
+=
col_data
[
col_offset
];
}
...
...
src/operators/math/im2col.h
浏览文件 @
1ad8f821
...
...
@@ -89,16 +89,15 @@ enum class ColFormat { kCFO = 0, kOCF = 1 };
*/
template
<
ColFormat
Format
,
typename
DeviceType
,
typename
T
>
class
Im2ColFunctor
{
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
public:
void
operator
()(
const
framework
::
Tensor
&
im
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
const
std
::
vector
<
int
>
&
padding
,
framework
::
Tensor
*
col
);
};
template
<
ColFormat
Format
,
typename
DeviceType
,
typename
T
>
class
Col2ImFunctor
{
public:
public:
void
operator
()(
const
framework
::
Tensor
&
col
,
const
std
::
vector
<
int
>
&
dilation
,
const
std
::
vector
<
int
>
&
stride
,
...
...
src/operators/math/math_function.cc
浏览文件 @
1ad8f821
src/operators/math/pool3x3.h
浏览文件 @
1ad8f821
src/operators/math/pool_2x2.h
浏览文件 @
1ad8f821
src/operators/math/pooling.cpp
浏览文件 @
1ad8f821
...
...
@@ -30,9 +30,8 @@ namespace math {
*/
template
<
typename
PoolProcess
,
typename
T
>
class
PoolFunctor
<
CPU
,
PoolProcess
,
T
>
{
public:
void
operator
()(
const
framework
::
Tensor
&
input
,
const
std
::
vector
<
int
>
&
ksize
,
public:
void
operator
()(
const
framework
::
Tensor
&
input
,
const
std
::
vector
<
int
>
&
ksize
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
PoolProcess
pool_process
,
framework
::
Tensor
*
output
)
{
...
...
@@ -77,8 +76,7 @@ class PoolFunctor<CPU, PoolProcess, T> {
T
ele
=
pool_process
.
initial
();
for
(
int
h
=
hstart
;
h
<
hend
;
++
h
)
{
for
(
int
w
=
wstart
;
w
<
wend
;
++
w
)
{
pool_process
.
compute
(
input_data
[
h
*
input_width
+
w
],
&
ele
);
pool_process
.
compute
(
input_data
[
h
*
input_width
+
w
],
&
ele
);
}
}
int
pool_size
=
(
hend
-
hstart
)
*
(
wend
-
wstart
);
...
...
src/operators/math/pooling.h
浏览文件 @
1ad8f821
...
...
@@ -38,7 +38,7 @@ namespace math {
* MaxPoolGrad and AvgPoolGrad are gradient operations respectively.
*/
template
<
class
T
>
class
MaxPool
{
public:
public:
inline
T
initial
()
{
return
static_cast
<
T
>
(
-
FLT_MAX
);
}
inline
void
compute
(
const
T
&
x
,
T
*
y
)
{
*
y
=
*
y
>
x
?
*
y
:
x
;
}
...
...
@@ -47,7 +47,7 @@ template <class T> class MaxPool {
};
template
<
class
T
>
class
AvgPool
{
public:
public:
inline
T
initial
()
{
return
static_cast
<
T
>
(
0
);
}
inline
void
compute
(
const
T
&
x
,
T
*
y
)
{
*
y
+=
x
;
}
...
...
@@ -57,9 +57,8 @@ template <class T> class AvgPool {
template
<
typename
DeviceType
,
typename
PoolProcess
,
typename
T
>
class
PoolFunctor
{
public:
void
operator
()(
const
framework
::
Tensor
&
input
,
const
std
::
vector
<
int
>
&
ksize
,
public:
void
operator
()(
const
framework
::
Tensor
&
input
,
const
std
::
vector
<
int
>
&
ksize
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
PoolProcess
pool_compute
,
framework
::
Tensor
*
output
);
...
...
src/operators/math/transform.h
浏览文件 @
1ad8f821
src/operators/math/vol2col.cc
浏览文件 @
1ad8f821
...
...
@@ -26,7 +26,7 @@ using Tensor = paddle_mobile::framework::Tensor;
* output_depth, output_height, output_width]
*/
template
<
typename
T
>
class
Vol2ColFunctor
<
CPU
,
T
>
{
public:
public:
void
operator
()(
const
Tensor
&
vol
,
const
std
::
vector
<
int
>
&
dilations
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
Tensor
*
col
)
const
{
...
...
@@ -81,28 +81,21 @@ template <typename T> class Vol2ColFunctor<CPU, T> {
int
d_offset
=
(
c
/
filter_width
/
filter_height
)
%
filter_depth
;
int
c_in
=
c
/
filter_width
/
filter_height
/
filter_depth
;
for
(
int
d
=
0
;
d
<
output_depth
;
++
d
)
{
int
d_pad
=
d
*
strides
[
0
]
-
paddings
[
0
]
+
d_offset
*
dilations
[
0
];
int
d_pad
=
d
*
strides
[
0
]
-
paddings
[
0
]
+
d_offset
*
dilations
[
0
];
for
(
int
h
=
0
;
h
<
output_height
;
++
h
)
{
int
h_pad
=
h
*
strides
[
1
]
-
paddings
[
1
]
+
h_offset
*
dilations
[
1
];
int
h_pad
=
h
*
strides
[
1
]
-
paddings
[
1
]
+
h_offset
*
dilations
[
1
];
for
(
int
w
=
0
;
w
<
output_width
;
++
w
)
{
int
w_pad
=
w
*
strides
[
2
]
-
paddings
[
2
]
+
w_offset
*
dilations
[
2
];
int
w_pad
=
w
*
strides
[
2
]
-
paddings
[
2
]
+
w_offset
*
dilations
[
2
];
int
col_idx
=
((
c
*
output_depth
+
d
)
*
output_height
+
h
)
*
output_width
+
w
;
((
c
*
output_depth
+
d
)
*
output_height
+
h
)
*
output_width
+
w
;
int
vol_idx
=
((
c_in
*
input_depth
+
d_pad
)
*
input_height
+
h_pad
)
*
((
c_in
*
input_depth
+
d_pad
)
*
input_height
+
h_pad
)
*
input_width
+
w_pad
;
col_data
[
col_idx
]
=
(
h_pad
<
0
||
h_pad
>=
input_height
||
w_pad
<
0
||
w_pad
>=
input_width
||
d_pad
<
0
||
d_pad
>=
input_depth
)
w_pad
>=
input_width
||
d_pad
<
0
||
d_pad
>=
input_depth
)
?
static_cast
<
T
>
(
0
)
:
vol_data
[
vol_idx
];
}
...
...
@@ -119,7 +112,7 @@ template <typename T> class Vol2ColFunctor<CPU, T> {
* output_depth, output_height, output_width]
*/
template
<
typename
T
>
class
Col2VolFunctor
<
CPU
,
T
>
{
public:
public:
void
operator
()(
const
Tensor
&
col
,
const
std
::
vector
<
int
>
&
dilations
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
Tensor
*
vol
)
const
{
...
...
@@ -173,27 +166,21 @@ template <typename T> class Col2VolFunctor<CPU, T> {
int
d_offset
=
(
c
/
filter_width
/
filter_height
)
%
filter_depth
;
int
cIm
=
c
/
filter_width
/
filter_height
/
filter_depth
;
for
(
int
d
=
0
;
d
<
output_depth
;
++
d
)
{
int
d_pad
=
d
*
strides
[
0
]
-
paddings
[
0
]
+
d_offset
*
dilations
[
0
];
int
d_pad
=
d
*
strides
[
0
]
-
paddings
[
0
]
+
d_offset
*
dilations
[
0
];
for
(
int
h
=
0
;
h
<
output_height
;
++
h
)
{
int
h_pad
=
h
*
strides
[
1
]
-
paddings
[
1
]
+
h_offset
*
dilations
[
1
];
int
h_pad
=
h
*
strides
[
1
]
-
paddings
[
1
]
+
h_offset
*
dilations
[
1
];
for
(
int
w
=
0
;
w
<
output_width
;
++
w
)
{
int
w_pad
=
w
*
strides
[
2
]
-
paddings
[
2
]
+
w_offset
*
dilations
[
2
];
int
w_pad
=
w
*
strides
[
2
]
-
paddings
[
2
]
+
w_offset
*
dilations
[
2
];
if
(
h_pad
>=
0
&&
h_pad
<
input_height
&&
w_pad
>=
0
&&
w_pad
<
input_width
&&
d_pad
>=
0
&&
d_pad
<
input_depth
)
{
w_pad
<
input_width
&&
d_pad
>=
0
&&
d_pad
<
input_depth
)
{
int
vol_idx
=
((
cIm
*
input_depth
+
d_pad
)
*
input_height
+
h_pad
)
*
((
cIm
*
input_depth
+
d_pad
)
*
input_height
+
h_pad
)
*
input_width
+
w_pad
;
int
col_idx
=
((
c
*
output_depth
+
d
)
*
output_height
+
h
)
*
output_width
+
((
c
*
output_depth
+
d
)
*
output_height
+
h
)
*
output_width
+
w
;
vol_data
[
vol_idx
]
+=
col_data
[
col_idx
];
}
...
...
src/operators/math/vol2col.h
浏览文件 @
1ad8f821
...
...
@@ -73,14 +73,14 @@ namespace math {
using
Tensor
=
paddle_mobile
::
framework
::
Tensor
;
template
<
typename
DeviceType
,
typename
T
>
class
Vol2ColFunctor
{
public:
public:
void
operator
()(
const
Tensor
&
vol
,
const
std
::
vector
<
int
>
&
dilations
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
Tensor
*
col
)
const
;
};
template
<
typename
DeviceType
,
typename
T
>
class
Col2VolFunctor
{
public:
public:
void
operator
()(
const
Tensor
&
col
,
const
std
::
vector
<
int
>
&
dilations
,
const
std
::
vector
<
int
>
&
strides
,
const
std
::
vector
<
int
>
&
paddings
,
Tensor
*
vol
)
const
;
...
...
src/operators/mul_op.cpp
浏览文件 @
1ad8f821
src/operators/mul_op.h
浏览文件 @
1ad8f821
...
...
@@ -27,12 +27,12 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
MulOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
MulOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
void
Run
()
const
{
...
...
@@ -43,7 +43,7 @@ class MulOp : public framework::OperatorWithKernel<DeviceType> {
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
protected:
protected:
MulParam
param_
;
};
...
...
src/operators/op_param.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,8 +24,7 @@ Print &operator<<(Print &printer, const ConvParam &conv_param) {
printer
<<
"parameter of conv: "
<<
"
\n
"
;
printer
<<
" stride: "
<<
" ("
<<
conv_param
.
Strides
()[
0
]
<<
conv_param
.
Strides
()[
1
]
<<
") "
<<
" ("
<<
conv_param
.
Strides
()[
0
]
<<
conv_param
.
Strides
()[
1
]
<<
") "
<<
"
\n
"
;
printer
<<
" paddings: "
<<
" ("
<<
conv_param
.
Paddings
()[
0
]
<<
conv_param
.
Paddings
()[
1
]
...
...
src/operators/op_param.h
浏览文件 @
1ad8f821
...
...
@@ -31,8 +31,8 @@ namespace operators {
using
namespace
framework
;
class
OpParam
:
PaddleMobileObject
{
public:
protected:
public:
protected:
template
<
typename
T
>
static
T
*
InputFrom
(
const
VariableNameMap
&
inputs
,
const
Scope
&
scope
)
{
return
GetVarValue
<
T
>
(
"Input"
,
inputs
,
scope
);
...
...
@@ -62,8 +62,7 @@ class OpParam : PaddleMobileObject {
return
GetVarValue
<
T
>
(
"Mean"
,
inputs
,
scope
);
}
template
<
typename
T
>
static
T
*
InputScaleFrom
(
const
VariableNameMap
&
inputs
,
const
Scope
&
scope
)
{
static
T
*
InputScaleFrom
(
const
VariableNameMap
&
inputs
,
const
Scope
&
scope
)
{
return
GetVarValue
<
T
>
(
"Scale"
,
inputs
,
scope
);
}
...
...
@@ -104,8 +103,8 @@ class OpParam : PaddleMobileObject {
}
template
<
typename
T
>
static
T
*
GetVarValue
(
const
std
::
string
&
key
,
const
VariableNameMap
&
var_map
,
const
Scope
&
scope
)
{
static
T
*
GetVarValue
(
const
std
::
string
&
key
,
const
VariableNameMap
&
var_map
,
const
Scope
&
scope
)
{
auto
var_vec
=
var_map
.
at
(
key
);
if
(
!
var_vec
.
empty
())
{
// std::cout << " get var value -- " << var_vec[0] <<
...
...
@@ -133,7 +132,7 @@ class OpParam : PaddleMobileObject {
};
class
ConvParam
:
OpParam
{
public:
public:
ConvParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
...
...
@@ -160,7 +159,7 @@ class ConvParam : OpParam {
const
int
&
Groups
()
const
{
return
groups
;
}
private:
private:
Tensor
*
input_
;
Tensor
*
output_
;
LoDTensor
*
filter_
;
...
...
@@ -173,7 +172,7 @@ class ConvParam : OpParam {
Print
&
operator
<<
(
Print
&
printer
,
const
ConvParam
&
conv_param
);
class
ElementwiseAddParam
:
OpParam
{
public:
public:
ElementwiseAddParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
...
...
@@ -192,7 +191,7 @@ class ElementwiseAddParam : OpParam {
const
int
&
Axis
()
const
{
return
axis_
;
}
private:
private:
Tensor
*
input_x_
;
Tensor
*
input_y_
;
Tensor
*
out_
;
...
...
@@ -200,7 +199,7 @@ class ElementwiseAddParam : OpParam {
};
class
MulParam
:
OpParam
{
public:
public:
MulParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
...
...
@@ -221,7 +220,7 @@ class MulParam : OpParam {
const
int
&
YNumColDims
()
const
{
return
y_num_col_dims_
;
}
private:
private:
Tensor
*
input_x_
;
Tensor
*
input_y_
;
Tensor
*
out_
;
...
...
@@ -230,7 +229,7 @@ class MulParam : OpParam {
};
class
ConcatParam
:
public
OpParam
{
public:
public:
ConcatParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
...
...
@@ -245,14 +244,14 @@ class ConcatParam : public OpParam {
const
int
&
Axis
()
const
{
return
axis_
;
}
private:
private:
std
::
vector
<
Tensor
*>
inputs_
;
Tensor
*
out_
;
int
axis_
;
};
class
LrnParam
:
public
OpParam
{
public:
public:
LrnParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
...
...
@@ -282,7 +281,7 @@ class LrnParam : public OpParam {
const
std
::
string
&
DataFormat
()
const
{
return
data_format_
;
}
private:
private:
Tensor
*
input_x_
;
Tensor
*
out_
;
Tensor
*
mid_out_
;
...
...
@@ -293,9 +292,8 @@ class LrnParam : public OpParam {
std
::
string
data_format_
;
};
class
BatchNormParam
:
OpParam
{
public:
BatchNormParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
public:
BatchNormParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
input_x_
=
InputXFrom
<
framework
::
Tensor
>
(
inputs
,
scope
);
...
...
@@ -329,7 +327,7 @@ class BatchNormParam : OpParam {
const
std
::
string
&
DataFormat
()
const
{
return
data_format_
;
}
private:
private:
Tensor
*
input_x_
;
Tensor
*
output_y_
;
Tensor
*
input_bias_
;
...
...
@@ -342,7 +340,7 @@ class BatchNormParam : OpParam {
std
::
string
data_format_
;
};
class
PoolParam
:
public
OpParam
{
public:
public:
PoolParam
(
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
const
framework
::
Scope
&
scope
)
{
...
...
@@ -373,7 +371,7 @@ class PoolParam : public OpParam {
bool
isGlobalPooling
()
const
{
return
gloabal_pooling_
;
}
private:
private:
Tensor
*
input_
;
Tensor
*
output_
;
std
::
string
pooling_type_
;
...
...
src/operators/pool_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -49,8 +49,8 @@ void PoolOp<DeviceType, T>::InferShape() const {
}
std
::
vector
<
int64_t
>
output_shape
({
in_x_dims
[
0
],
in_x_dims
[
1
]});
for
(
size_t
i
=
0
;
i
<
ksize
.
size
();
++
i
)
{
output_shape
.
push_back
(
PoolOutputSize
(
in_x_dims
[
i
+
2
],
ksize
[
i
],
paddings
[
i
],
strides
[
i
],
ceil_mode
));
output_shape
.
push_back
(
PoolOutputSize
(
in_x_dims
[
i
+
2
],
ksize
[
i
],
paddings
[
i
],
strides
[
i
],
ceil_mode
));
}
param_
.
Output
()
->
Resize
(
framework
::
make_ddim
(
output_shape
));
DLOG
<<
"infer shape out size ="
<<
param_
.
Output
()
->
numel
();
...
...
src/operators/pool_op.h
浏览文件 @
1ad8f821
...
...
@@ -28,12 +28,12 @@ using namespace framework;
template
<
typename
DeviceType
,
typename
T
>
class
PoolOp
:
public
framework
::
OperatorWithKernel
<
DeviceType
>
{
public:
public:
PoolOp
(
const
std
::
string
&
type
,
const
VariableNameMap
&
inputs
,
const
VariableNameMap
&
outputs
,
const
framework
::
AttributeMap
&
attrs
,
std
::
shared_ptr
<
framework
::
Scope
>
scope
)
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
output
s
,
attrs
,
scope
),
:
framework
::
OperatorWithKernel
<
DeviceType
>
(
type
,
inputs
,
outputs
,
attr
s
,
scope
),
param_
(
inputs
,
outputs
,
attrs
,
*
scope
)
{}
using
framework
::
OperatorWithKernel
<
DeviceType
>::
OperatorWithKernel
;
void
InferShape
()
const
override
;
...
...
@@ -45,7 +45,7 @@ class PoolOp : public framework::OperatorWithKernel<DeviceType> {
this
->
ClearVariables
({
"X"
});
}
private:
private:
PoolParam
param_
;
};
}
// namespace operators
...
...
src/platform/data_type.h
浏览文件 @
1ad8f821
src/platform/macros.h
浏览文件 @
1ad8f821
...
...
@@ -17,7 +17,7 @@ limitations under the License. */
// Disable the copy and assignment operator for a class.
#ifndef DISABLE_COPY_AND_ASSIGN
#define DISABLE_COPY_AND_ASSIGN(classname) \
private:
\
private:
\
classname(const classname &) = delete; \
classname(classname &&) = delete; \
classname &operator=(const classname &) = delete; \
...
...
test/common/test_log.cpp
浏览文件 @
1ad8f821
test/framework/executor_for_test.cpp
浏览文件 @
1ad8f821
...
...
@@ -38,8 +38,8 @@ Executor4Test<DeviceType, OpType>::Executor4Test(const Program<DeviceType> p,
std
::
shared_ptr
<
OpDesc
>
op
=
ops
[
j
];
if
(
op
->
Type
()
==
op_type
)
{
std
::
shared_ptr
<
OpType
>
op_ptr
=
std
::
make_shared
<
OpType
>
(
op
->
Type
(),
op
->
GetInputs
(),
op
->
GetOutputs
(),
op
->
GetAttrMap
(),
this
->
program_
.
scope
);
op
->
Type
(),
op
->
GetInputs
(),
op
->
GetOutputs
(),
op
->
GetAttrMap
(),
this
->
program_
.
scope
);
this
->
ops_of_block_
[
*
block_desc
.
get
()].
push_back
(
op_ptr
);
break
;
...
...
test/framework/executor_for_test.h
浏览文件 @
1ad8f821
...
...
@@ -27,7 +27,7 @@ using namespace paddle_mobile::framework;
template
<
typename
DeviceType
,
typename
OpType
>
class
Executor4Test
:
public
Executor
<
DeviceType
>
{
public:
public:
Executor4Test
(
const
Program
<
DeviceType
>
p
,
std
::
string
op_type
);
std
::
shared_ptr
<
Tensor
>
predict
(
Tensor
&
t
,
std
::
string
input
,
...
...
test/framework/test_load.cpp
浏览文件 @
1ad8f821
test/framework/test_optimize.cpp
浏览文件 @
1ad8f821
test/operators/test_batchnorm_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
TestBatchNormOp
{
public:
public:
explicit
TestBatchNormOp
(
const
Program
<
Dtype
>
p
)
:
program_
(
p
)
{
if
(
use_optimize_
)
{
to_predict_program_
=
program_
.
optimizeProgram
;
...
...
@@ -52,8 +52,7 @@ template <typename Dtype> class TestBatchNormOp {
DLOG
<<
" Input Scale is : "
<<
op
->
Input
(
"Scale"
)[
0
];
DLOG
<<
" Input Bias is : "
<<
op
->
Input
(
"Bias"
)[
0
];
DLOG
<<
" Output Y is : "
<<
op
->
Output
(
"Y"
)[
0
];
DLOG
<<
" epsilon : "
<<
op
->
GetAttrMap
().
at
(
"epsilon"
).
Get
<
float
>
();
DLOG
<<
" epsilon : "
<<
op
->
GetAttrMap
().
at
(
"epsilon"
).
Get
<
float
>
();
std
::
shared_ptr
<
operators
::
BatchNormOp
<
Dtype
,
float
>>
lrn
=
std
::
make_shared
<
operators
::
BatchNormOp
<
Dtype
,
float
>>
(
op
->
Type
(),
op
->
GetInputs
(),
op
->
GetOutputs
(),
...
...
@@ -101,7 +100,7 @@ template <typename Dtype> class TestBatchNormOp {
return
out_tensor
;
}
private:
private:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
std
::
map
<
framework
::
BlockDesc
,
...
...
@@ -113,8 +112,7 @@ template <typename Dtype> class TestBatchNormOp {
const
Tensor
&
t4
,
const
Tensor
&
t5
,
int
block_id
)
{
std
::
shared_ptr
<
BlockDesc
>
to_predict_block
=
to_predict_program_
->
Block
(
block_id
);
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
auto
op
=
ops_of_block_
[
*
to_predict_block
.
get
()][
j
];
DLOG
<<
"op -> run()"
;
op
->
Run
();
...
...
@@ -140,8 +138,7 @@ int main() {
auto
*
inputx1_ptr
=
inputx1
.
data
<
float
>
();
paddle_mobile
::
framework
::
Tensor
mean
;
SetupTensor
<
float
>
(
&
mean
,
{
10
},
static_cast
<
float
>
(
0
),
static_cast
<
float
>
(
1
));
SetupTensor
<
float
>
(
&
mean
,
{
10
},
static_cast
<
float
>
(
0
),
static_cast
<
float
>
(
1
));
auto
*
mean_ptr
=
mean
.
data
<
float
>
();
paddle_mobile
::
framework
::
Tensor
scale
;
...
...
@@ -155,12 +152,11 @@ int main() {
auto
*
variance_ptr
=
variance
.
data
<
float
>
();
paddle_mobile
::
framework
::
Tensor
bias
;
SetupTensor
<
float
>
(
&
bias
,
{
10
},
static_cast
<
float
>
(
0
),
static_cast
<
float
>
(
1
));
SetupTensor
<
float
>
(
&
bias
,
{
10
},
static_cast
<
float
>
(
0
),
static_cast
<
float
>
(
1
));
auto
*
bias_ptr
=
bias
.
data
<
float
>
();
paddle_mobile
::
framework
::
TestBatchNormOp
<
paddle_mobile
::
CPU
>
testBatchNormOp
(
program
);
paddle_mobile
::
framework
::
TestBatchNormOp
<
paddle_mobile
::
CPU
>
testBatchNormOp
(
program
);
auto
output_bn
=
testBatchNormOp
.
predict_bn
(
inputx1
,
mean
,
scale
,
variance
,
bias
);
...
...
test/operators/test_concat_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
TestConcatOp
{
public:
public:
explicit
TestConcatOp
(
const
Program
<
Dtype
>
p
)
:
program_
(
p
)
{
if
(
use_optimize_
)
{
to_predict_program_
=
program_
.
optimizeProgram
;
...
...
@@ -41,15 +41,13 @@ template <typename Dtype> class TestConcatOp {
// DLOG << " ops " << ops.size();
for
(
int
j
=
0
;
j
<
ops
.
size
();
++
j
)
{
std
::
shared_ptr
<
OpDesc
>
op
=
ops
[
j
];
if
(
op
->
Type
()
==
"concat"
&&
op
->
Input
(
"X"
)[
0
]
==
"conv2d_3.tmp_1"
)
{
if
(
op
->
Type
()
==
"concat"
&&
op
->
Input
(
"X"
)[
0
]
==
"conv2d_3.tmp_1"
)
{
DLOG
<<
" mul attr size: "
<<
op
->
GetAttrMap
().
size
();
DLOG
<<
" inputs size: "
<<
op
->
GetInputs
().
size
();
DLOG
<<
" outputs size: "
<<
op
->
GetOutputs
().
size
();
DLOG
<<
" Input X is : "
<<
op
->
Input
(
"X"
)[
0
];
DLOG
<<
" Output Out is : "
<<
op
->
Output
(
"Out"
)[
0
];
DLOG
<<
" axis : "
<<
op
->
GetAttrMap
().
at
(
"axis"
).
Get
<
int
>
();
DLOG
<<
" axis : "
<<
op
->
GetAttrMap
().
at
(
"axis"
).
Get
<
int
>
();
std
::
shared_ptr
<
operators
::
ConcatOp
<
Dtype
,
float
>>
concat
=
std
::
make_shared
<
operators
::
ConcatOp
<
Dtype
,
float
>>
(
...
...
@@ -94,7 +92,7 @@ template <typename Dtype> class TestConcatOp {
return
out_tensor
;
}
private:
private:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
std
::
map
<
framework
::
BlockDesc
,
...
...
@@ -106,8 +104,7 @@ template <typename Dtype> class TestConcatOp {
const
Tensor
&
t4
,
int
block_id
)
{
std
::
shared_ptr
<
BlockDesc
>
to_predict_block
=
to_predict_program_
->
Block
(
block_id
);
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
auto
op
=
ops_of_block_
[
*
to_predict_block
.
get
()][
j
];
DLOG
<<
"op -> run()"
;
op
->
Run
();
...
...
@@ -168,8 +165,7 @@ int main() {
/// output (4,100,2,2)
int
input_index
=
input_n
*
stride0
+
input_c
*
stride1
+
input_h
*
stride2
+
input_w
;
int
output_index
=
input_n
*
100
*
2
*
2
+
int
output_index
=
input_n
*
100
*
2
*
2
+
(
input_c
+
inputx1
.
dims
()[
1
]
+
inputx2
.
dims
()[
1
])
*
2
*
2
+
input_h
*
2
+
input_w
;
...
...
test/operators/test_cov_op.cpp
浏览文件 @
1ad8f821
test/operators/test_elementwise_add_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
TestElementwiseAddOp
{
public:
public:
explicit
TestElementwiseAddOp
(
const
Program
<
Dtype
>
p
)
:
program_
(
p
)
{
if
(
use_optimize_
)
{
to_predict_program_
=
program_
.
optimizeProgram
;
...
...
@@ -43,8 +43,7 @@ template <typename Dtype> class TestElementwiseAddOp {
std
::
shared_ptr
<
OpDesc
>
op
=
ops
[
j
];
if
(
op
->
Type
()
==
"elementwise_add"
&&
op
->
Input
(
"X"
)[
0
]
==
"batch_norm_2.tmp_2"
)
{
DLOG
<<
" elementwise_add attr size: "
<<
op
->
GetAttrMap
().
size
();
DLOG
<<
" elementwise_add attr size: "
<<
op
->
GetAttrMap
().
size
();
DLOG
<<
" inputs size: "
<<
op
->
GetInputs
().
size
();
DLOG
<<
" outputs size: "
<<
op
->
GetOutputs
().
size
();
DLOG
<<
" Input X is : "
<<
op
->
Input
(
"X"
)[
0
];
...
...
@@ -54,9 +53,8 @@ template <typename Dtype> class TestElementwiseAddOp {
int
axis
=
axis_attr
.
Get
<
int
>
();
DLOG
<<
" Attr axis is : "
<<
axis
;
std
::
shared_ptr
<
operators
::
ElementwiseAddOp
<
Dtype
,
float
>>
add
=
std
::
make_shared
<
operators
::
ElementwiseAddOp
<
Dtype
,
float
>>
(
std
::
shared_ptr
<
operators
::
ElementwiseAddOp
<
Dtype
,
float
>>
add
=
std
::
make_shared
<
operators
::
ElementwiseAddOp
<
Dtype
,
float
>>
(
op
->
Type
(),
op
->
GetInputs
(),
op
->
GetOutputs
(),
op
->
GetAttrMap
(),
program_
.
scope
);
ops_of_block_
[
*
block_desc
.
get
()].
push_back
(
add
);
...
...
@@ -89,7 +87,7 @@ template <typename Dtype> class TestElementwiseAddOp {
return
out_tensor
;
}
private:
private:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
std
::
map
<
framework
::
BlockDesc
,
...
...
@@ -100,8 +98,7 @@ template <typename Dtype> class TestElementwiseAddOp {
void
predict_add
(
const
Tensor
&
t1
,
const
Tensor
&
t2
,
int
block_id
)
{
std
::
shared_ptr
<
BlockDesc
>
to_predict_block
=
to_predict_program_
->
Block
(
block_id
);
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
auto
op
=
ops_of_block_
[
*
to_predict_block
.
get
()][
j
];
DLOG
<<
"op -> run()"
;
op
->
Run
();
...
...
test/operators/test_lrn_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
TestLrnOp
{
public:
public:
explicit
TestLrnOp
(
const
Program
<
Dtype
>
p
)
:
program_
(
p
)
{
if
(
use_optimize_
)
{
to_predict_program_
=
program_
.
optimizeProgram
;
...
...
@@ -41,18 +41,15 @@ template <typename Dtype> class TestLrnOp {
// DLOG << " ops " << ops.size();
for
(
int
j
=
0
;
j
<
ops
.
size
();
++
j
)
{
std
::
shared_ptr
<
OpDesc
>
op
=
ops
[
j
];
if
(
op
->
Type
()
==
"lrn"
&&
op
->
Input
(
"X"
)[
0
]
==
"pool2d_0.tmp_0"
)
{
if
(
op
->
Type
()
==
"lrn"
&&
op
->
Input
(
"X"
)[
0
]
==
"pool2d_0.tmp_0"
)
{
DLOG
<<
" mul attr size: "
<<
op
->
GetAttrMap
().
size
();
DLOG
<<
" inputs size: "
<<
op
->
GetInputs
().
size
();
DLOG
<<
" outputs size: "
<<
op
->
GetOutputs
().
size
();
DLOG
<<
" Input X is : "
<<
op
->
Input
(
"X"
)[
0
];
DLOG
<<
" Output Out is : "
<<
op
->
Output
(
"Out"
)[
0
];
DLOG
<<
" n : "
<<
op
->
GetAttrMap
().
at
(
"n"
).
Get
<
int
>
();
DLOG
<<
" alpha : "
<<
op
->
GetAttrMap
().
at
(
"alpha"
).
Get
<
float
>
();
DLOG
<<
" beta : "
<<
op
->
GetAttrMap
().
at
(
"beta"
).
Get
<
float
>
();
DLOG
<<
" alpha : "
<<
op
->
GetAttrMap
().
at
(
"alpha"
).
Get
<
float
>
();
DLOG
<<
" beta : "
<<
op
->
GetAttrMap
().
at
(
"beta"
).
Get
<
float
>
();
DLOG
<<
" k : "
<<
op
->
GetAttrMap
().
at
(
"k"
).
Get
<
float
>
();
std
::
shared_ptr
<
operators
::
LrnOp
<
Dtype
,
float
>>
lrn
=
std
::
make_shared
<
operators
::
LrnOp
<
Dtype
,
float
>>
(
...
...
@@ -84,7 +81,7 @@ template <typename Dtype> class TestLrnOp {
return
out_tensor
;
}
private:
private:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
std
::
map
<
framework
::
BlockDesc
,
...
...
@@ -95,8 +92,7 @@ template <typename Dtype> class TestLrnOp {
void
predict_lrn
(
const
Tensor
&
t1
,
int
block_id
)
{
std
::
shared_ptr
<
BlockDesc
>
to_predict_block
=
to_predict_program_
->
Block
(
block_id
);
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
auto
op
=
ops_of_block_
[
*
to_predict_block
.
get
()][
j
];
DLOG
<<
"op -> run()"
;
op
->
Run
();
...
...
@@ -151,9 +147,8 @@ int main() {
}
DLOGF
(
"
\n
"
);
}
DLOG
<<
inputx1_ptr
[
0
]
<<
" / ((1 + 0.00002 * ( "
<<
inputx1_ptr
[
0
]
<<
"^2 + "
<<
inputx1_ptr
[
4
]
<<
"^2 + "
<<
inputx1_ptr
[
8
]
<<
"^2 ))^0.75) = "
;
DLOG
<<
inputx1_ptr
[
0
]
<<
" / ((1 + 0.00002 * ( "
<<
inputx1_ptr
[
0
]
<<
"^2 + "
<<
inputx1_ptr
[
4
]
<<
"^2 + "
<<
inputx1_ptr
[
8
]
<<
"^2 ))^0.75) = "
;
DLOG
<<
output_lrn_ptr
[
0
];
return
0
;
}
test/operators/test_mul_op.cpp
浏览文件 @
1ad8f821
...
...
@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace
framework
{
template
<
typename
Dtype
>
class
TestMulOp
{
public:
public:
explicit
TestMulOp
(
const
Program
<
Dtype
>
p
)
:
program_
(
p
)
{
if
(
use_optimize_
)
{
to_predict_program_
=
program_
.
optimizeProgram
;
...
...
@@ -41,8 +41,7 @@ template <typename Dtype> class TestMulOp {
// DLOG << " ops " << ops.size();
for
(
int
j
=
0
;
j
<
ops
.
size
();
++
j
)
{
std
::
shared_ptr
<
OpDesc
>
op
=
ops
[
j
];
if
(
op
->
Type
()
==
"mul"
&&
op
->
Input
(
"X"
)[
0
]
==
"pool2d_0.tmp_0"
)
{
if
(
op
->
Type
()
==
"mul"
&&
op
->
Input
(
"X"
)[
0
]
==
"pool2d_0.tmp_0"
)
{
DLOG
<<
" mul attr size: "
<<
op
->
GetAttrMap
().
size
();
DLOG
<<
" inputs size: "
<<
op
->
GetInputs
().
size
();
DLOG
<<
" outputs size: "
<<
op
->
GetOutputs
().
size
();
...
...
@@ -88,7 +87,7 @@ template <typename Dtype> class TestMulOp {
return
out_tensor
;
}
private:
private:
const
framework
::
Program
<
Dtype
>
program_
;
std
::
shared_ptr
<
ProgramDesc
>
to_predict_program_
;
std
::
map
<
framework
::
BlockDesc
,
...
...
@@ -99,8 +98,7 @@ template <typename Dtype> class TestMulOp {
void
predict_mul
(
const
Tensor
&
t1
,
const
Tensor
&
t2
,
int
block_id
)
{
std
::
shared_ptr
<
BlockDesc
>
to_predict_block
=
to_predict_program_
->
Block
(
block_id
);
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
for
(
int
j
=
0
;
j
<
ops_of_block_
[
*
to_predict_block
.
get
()].
size
();
++
j
)
{
auto
op
=
ops_of_block_
[
*
to_predict_block
.
get
()][
j
];
DLOG
<<
"op -> run()"
;
op
->
Run
();
...
...
test/operators/test_pool_op.cpp
浏览文件 @
1ad8f821
test/test_helper.h
浏览文件 @
1ad8f821
...
...
@@ -30,7 +30,6 @@ void SetupTensor(paddle_mobile::framework::Tensor *input,
T
*
input_ptr
=
input
->
mutable_data
<
T
>
(
dims
);
for
(
int
i
=
0
;
i
<
input
->
numel
();
++
i
)
{
input_ptr
[
i
]
=
static_cast
<
T
>
(
uniform_dist
(
rng
)
*
(
upper
-
lower
)
+
lower
);
input_ptr
[
i
]
=
static_cast
<
T
>
(
uniform_dist
(
rng
)
*
(
upper
-
lower
)
+
lower
);
}
}
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录