提交 1ad8f821 编写于 作者: 朔-望's avatar 朔-望

Change to 2-space indentation, reformat the code, and remove the build folder

上级 e35ef6fe
...@@ -2,5 +2,4 @@
Language: Cpp
BasedOnStyle: LLVM
Standard: Cpp11
IndentWidth: 4
... ...
[
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/ddim.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/lod_tensor.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/scope.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/tensor_util.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/memory/t_malloc.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/im2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/math_function.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/math/vol2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/common/variant.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/attribute.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/block_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/data_transform.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/executor.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/framework.pb.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/op_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/operator.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/paddle_mobile_object.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/program.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/program_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/framework/var_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/io.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/conv_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/elementwise_add_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/elementwise_add_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/arm/mul_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/kernel/fpga/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/mul_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-static.dir/src/operators/op_param.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/ddim.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/ddim.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/lod_tensor.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/lod_tensor.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/scope.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/scope.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/tensor_util.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/tensor_util.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/memory/t_malloc.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/memory/t_malloc.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/im2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/im2col.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/math_function.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/math_function.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/math/vol2col.cc.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/math/vol2col.cc"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/common/variant.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/common/variant.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/attribute.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/attribute.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/block_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/block_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/data_transform.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/data_transform.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/executor.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/executor.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/framework.pb.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/framework.pb.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/op_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/op_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/operator.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/operator.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/paddle_mobile_object.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/paddle_mobile_object.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/program.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/program_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/program_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/framework/var_desc.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/framework/var_desc.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/io.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/io.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/conv_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/conv_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/elementwise_add_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/elementwise_add_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/conv_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/elementwise_add_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/elementwise_add_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/arm/mul_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/arm/mul_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/kernel/fpga/conv_kernel.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/kernel/fpga/conv_kernel.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/mul_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/mul_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -Dpaddle_mobile_EXPORTS -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -fPIC -std=c++11 -o CMakeFiles/paddle-mobile.dir/src/operators/op_param.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/src/operators/op_param.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-log.dir/common/test_log.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/common/test_log.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/test/common/test_log.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-conv-op.dir/operators/test_cov_op.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/operators/test_cov_op.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/test/operators/test_cov_op.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/test-load.dir/framework/test_load.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/framework/test_load.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/test/framework/test_load.cpp"
},
{
"directory": "/Users/allonli/Documents/workspace/paddle-mobile/cmake-build-release/test",
"command": "/Library/Developer/CommandLineTools/usr/bin/c++ -DPADDLE_MOBILE_DEBUG=\\\"true\\\" -I/Users/allonli/Documents/workspace/paddle-mobile/src -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/openblas/include -I/Users/allonli/Documents/workspace/paddle-mobile/third-party/protobuf/include -O2 -g -DNDEBUG -std=c++11 -o CMakeFiles/paddle-mobile-test.dir/main.cpp.o -c /Users/allonli/Documents/workspace/paddle-mobile/test/main.cpp",
"file": "/Users/allonli/Documents/workspace/paddle-mobile/test/main.cpp"
}
]
\ No newline at end of file
...@@ -56,7 +56,7 @@ struct Print { ...@@ -56,7 +56,7 @@ struct Print {
return *this; return *this;
} }
private: private:
void print(LogLevel level) { void print(LogLevel level) {
buffer_ << std::endl; buffer_ << std::endl;
if (level == kLOG_ERROR) { if (level == kLOG_ERROR) {
...@@ -73,8 +73,7 @@ struct ToLog { ...@@ -73,8 +73,7 @@ struct ToLog {
: level_(level) { : level_(level) {
unsigned blanks = unsigned blanks =
(unsigned)(level > kLOG_DEBUG ? (level - kLOG_DEBUG) * 4 : 1); (unsigned)(level > kLOG_DEBUG ? (level - kLOG_DEBUG) * 4 : 1);
printer_ << logs[level] << " " << info << ":" printer_ << logs[level] << " " << info << ":" << std::string(blanks, ' ');
<< std::string(blanks, ' ');
} }
template <typename T> ToLog &operator<<(T const &value) { template <typename T> ToLog &operator<<(T const &value) {
...@@ -84,7 +83,7 @@ struct ToLog { ...@@ -84,7 +83,7 @@ struct ToLog {
~ToLog() { printer_.print(level_); } ~ToLog() { printer_.print(level_); }
private: private:
LogLevel level_; LogLevel level_;
Print printer_; Print printer_;
}; };
...@@ -93,10 +92,10 @@ struct ToLog { ...@@ -93,10 +92,10 @@ struct ToLog {
if (level > paddle_mobile::log_level) { \ if (level > paddle_mobile::log_level) { \
} else \ } else \
paddle_mobile::ToLog( \ paddle_mobile::ToLog( \
level, (std::stringstream() \ level, \
(std::stringstream() \
<< "[file: " \ << "[file: " \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) \ << (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) : __FILE__) \
: __FILE__) \
<< "] [line: " << __LINE__ << "] ") \ << "] [line: " << __LINE__ << "] ") \
.str()) .str())
...@@ -107,8 +106,7 @@ struct ToLog { ...@@ -107,8 +106,7 @@ struct ToLog {
paddle_mobile::kLOG_DEBUG, \ paddle_mobile::kLOG_DEBUG, \
(std::stringstream() \ (std::stringstream() \
<< "[file: " \ << "[file: " \
<< (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) \ << (strrchr(__FILE__, '/') ? (strrchr(__FILE__, '/') + 1) : __FILE__) \
: __FILE__) \
<< "] [line: " << __LINE__ << "] ") \ << "] [line: " << __LINE__ << "] ") \
.str()) .str())
} // namespace paddle_mobile } // namespace paddle_mobile
...@@ -144,7 +142,7 @@ struct Print { ...@@ -144,7 +142,7 @@ struct Print {
friend struct ToLog; friend struct ToLog;
template <typename T> Print &operator<<(T const &value) {} template <typename T> Print &operator<<(T const &value) {}
private: private:
}; };
struct ToLog { struct ToLog {
......
...@@ -49,7 +49,7 @@ template <typename F> struct VariantHelper<F> { ...@@ -49,7 +49,7 @@ template <typename F> struct VariantHelper<F> {
}; };
template <size_t size> class RawData { template <size_t size> class RawData {
public: public:
char data[size]; char data[size];
RawData() {} RawData() {}
RawData(const RawData &raw_data) { strcpy(data, raw_data.data); } RawData(const RawData &raw_data) { strcpy(data, raw_data.data); }
...@@ -87,7 +87,7 @@ template <typename... Ts> struct Variant { ...@@ -87,7 +87,7 @@ template <typename... Ts> struct Variant {
size_t TypeId() const { return type_id; } size_t TypeId() const { return type_id; }
private: private:
static inline size_t invalid_type() { return typeid(void).hash_code(); } static inline size_t invalid_type() { return typeid(void).hash_code(); }
typedef VariantHelper<Ts...> helper; typedef VariantHelper<Ts...> helper;
size_t type_id; size_t type_id;
......
...@@ -27,7 +27,7 @@ namespace framework { ...@@ -27,7 +27,7 @@ namespace framework {
class BlockDesc; class BlockDesc;
class Attribute { class Attribute {
public: public:
static Attribute GetAttrValue(const proto::OpDesc::Attr &attr_desc) { static Attribute GetAttrValue(const proto::OpDesc::Attr &attr_desc) {
// std::cout << "begin get attr value" << std::endl; // std::cout << "begin get attr value" << std::endl;
Attribute attr; Attribute attr;
...@@ -100,7 +100,7 @@ class Attribute { ...@@ -100,7 +100,7 @@ class Attribute {
template <typename T> T &Get() const { return variant_.Get<T>(); } template <typename T> T &Get() const { return variant_.Get<T>(); }
private: private:
Variant<int, float, std::string, std::vector<int>, std::vector<float>, Variant<int, float, std::string, std::vector<int>, std::vector<float>,
std::vector<std::string>, bool, std::vector<bool>, BlockDesc *, std::vector<std::string>, bool, std::vector<bool>, BlockDesc *,
int64_t> int64_t>
...@@ -110,7 +110,7 @@ class Attribute { ...@@ -110,7 +110,7 @@ class Attribute {
using AttributeMap = std::unordered_map<std::string, Attribute>; using AttributeMap = std::unordered_map<std::string, Attribute>;
class AttrReader { class AttrReader {
public: public:
explicit AttrReader(const AttributeMap &attrs) : attrs_(attrs) {} explicit AttrReader(const AttributeMap &attrs) : attrs_(attrs) {}
template <typename T> inline T Get(const std::string &name) const { template <typename T> inline T Get(const std::string &name) const {
...@@ -121,7 +121,7 @@ class AttrReader { ...@@ -121,7 +121,7 @@ class AttrReader {
return ((Attribute)attrs_.at(name)).Get<T>(); return ((Attribute)attrs_.at(name)).Get<T>();
} }
private: private:
const AttributeMap &attrs_; const AttributeMap &attrs_;
}; };
......
...@@ -27,7 +27,7 @@ namespace paddle_mobile { ...@@ -27,7 +27,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class BlockDesc : PaddleMobileObject { class BlockDesc : PaddleMobileObject {
public: public:
BlockDesc(const proto::BlockDesc &desc); BlockDesc(const proto::BlockDesc &desc);
const int &ID() const { return desc_.idx(); } const int &ID() const { return desc_.idx(); }
...@@ -35,8 +35,7 @@ class BlockDesc : PaddleMobileObject { ...@@ -35,8 +35,7 @@ class BlockDesc : PaddleMobileObject {
const int &Parent() const { return desc_.parent_idx(); } const int &Parent() const { return desc_.parent_idx(); }
bool operator==(const paddle_mobile::framework::BlockDesc &in_block) const { bool operator==(const paddle_mobile::framework::BlockDesc &in_block) const {
return this->ID() == in_block.ID() && return this->ID() == in_block.ID() && this->Parent() == in_block.Parent();
this->Parent() == in_block.Parent();
} }
bool operator<(const paddle_mobile::framework::BlockDesc &in_block) const { bool operator<(const paddle_mobile::framework::BlockDesc &in_block) const {
...@@ -46,7 +45,7 @@ class BlockDesc : PaddleMobileObject { ...@@ -46,7 +45,7 @@ class BlockDesc : PaddleMobileObject {
std::vector<std::shared_ptr<VarDesc>> Vars() const; std::vector<std::shared_ptr<VarDesc>> Vars() const;
std::vector<std::shared_ptr<OpDesc>> Ops() const; std::vector<std::shared_ptr<OpDesc>> Ops() const;
private: private:
proto::BlockDesc desc_; proto::BlockDesc desc_;
std::vector<std::shared_ptr<OpDesc>> ops_; std::vector<std::shared_ptr<OpDesc>> ops_;
std::unordered_map<std::string, std::shared_ptr<VarDesc>> vars_; std::unordered_map<std::string, std::shared_ptr<VarDesc>> vars_;
......
...@@ -90,26 +90,24 @@ DDim make_ddim(const std::vector<int> &dims) { ...@@ -90,26 +90,24 @@ DDim make_ddim(const std::vector<int> &dims) {
// XXX For some reason, putting this in an anonymous namespace causes // XXX For some reason, putting this in an anonymous namespace causes
// errors // errors
struct DynamicMutableIndexer : Vistor<int64_t &> { struct DynamicMutableIndexer : Vistor<int64_t &> {
public: public:
explicit DynamicMutableIndexer(int idx) : idx_(idx) {} explicit DynamicMutableIndexer(int idx) : idx_(idx) {}
template <int D> int64_t &operator()(Dim<D> &dim) const { template <int D> int64_t &operator()(Dim<D> &dim) const { return dim[idx_]; }
return dim[idx_];
}
private: private:
int idx_; int idx_;
}; };
struct DynamicConstIndexer : public Vistor<int64_t> { struct DynamicConstIndexer : public Vistor<int64_t> {
public: public:
explicit DynamicConstIndexer(int idx) : idx_(idx) {} explicit DynamicConstIndexer(int idx) : idx_(idx) {}
template <int D> int64_t operator()(const Dim<D> &dim) const { template <int D> int64_t operator()(const Dim<D> &dim) const {
return dim[idx_]; return dim[idx_];
} }
private: private:
int idx_; int idx_;
}; };
...@@ -288,7 +286,7 @@ struct OSVistor : Vistor<std::ostream &> { ...@@ -288,7 +286,7 @@ struct OSVistor : Vistor<std::ostream &> {
return os_ << dim; return os_ << dim;
} }
private: private:
std::ostream &os_; std::ostream &os_;
}; };
......
...@@ -123,9 +123,7 @@ template <> struct DimGetter<0> { ...@@ -123,9 +123,7 @@ template <> struct DimGetter<0> {
return d.head; return d.head;
} }
// Return a reference if Dim is mutable // Return a reference if Dim is mutable
template <typename D> HOSTDEVICE static int64_t &impl(D &d) { template <typename D> HOSTDEVICE static int64_t &impl(D &d) { return d.head; }
return d.head;
}
}; };
template <int D> HOSTDEVICE int64_t &indexer(Dim<D> &dim, int idx) { template <int D> HOSTDEVICE int64_t &indexer(Dim<D> &dim, int idx) {
......
...@@ -35,14 +35,14 @@ namespace paddle_mobile { ...@@ -35,14 +35,14 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class Executor { template <typename Dtype> class Executor {
public: public:
Executor(); Executor();
Executor(const Program<Dtype> p); Executor(const Program<Dtype> p);
std::shared_ptr<Tensor> predict(Tensor &t); std::shared_ptr<Tensor> predict(Tensor &t);
public: public:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
......
...@@ -18,74 +18,73 @@ namespace paddle_mobile { ...@@ -18,74 +18,73 @@ namespace paddle_mobile {
namespace framework { namespace framework {
namespace proto { namespace proto {
class OpDesc_AttrDefaultTypeInternal { class OpDesc_AttrDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpDesc_Attr> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpDesc_Attr> _instance;
} _OpDesc_Attr_default_instance_; } _OpDesc_Attr_default_instance_;
class OpDesc_VarDefaultTypeInternal { class OpDesc_VarDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpDesc_Var> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpDesc_Var> _instance;
} _OpDesc_Var_default_instance_; } _OpDesc_Var_default_instance_;
class OpDescDefaultTypeInternal { class OpDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpDesc> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpDesc> _instance;
} _OpDesc_default_instance_; } _OpDesc_default_instance_;
class OpProto_VarDefaultTypeInternal { class OpProto_VarDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpProto_Var> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpProto_Var> _instance;
} _OpProto_Var_default_instance_; } _OpProto_Var_default_instance_;
class OpProto_AttrDefaultTypeInternal { class OpProto_AttrDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpProto_Attr> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpProto_Attr> _instance;
} _OpProto_Attr_default_instance_; } _OpProto_Attr_default_instance_;
class OpProtoDefaultTypeInternal { class OpProtoDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<OpProto> _instance; ::google::protobuf::internal::ExplicitlyConstructed<OpProto> _instance;
} _OpProto_default_instance_; } _OpProto_default_instance_;
class VarType_TensorDescDefaultTypeInternal { class VarType_TensorDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType_TensorDesc> ::google::protobuf::internal::ExplicitlyConstructed<VarType_TensorDesc>
_instance; _instance;
} _VarType_TensorDesc_default_instance_; } _VarType_TensorDesc_default_instance_;
class VarType_LoDTensorDescDefaultTypeInternal { class VarType_LoDTensorDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType_LoDTensorDesc> ::google::protobuf::internal::ExplicitlyConstructed<VarType_LoDTensorDesc>
_instance; _instance;
} _VarType_LoDTensorDesc_default_instance_; } _VarType_LoDTensorDesc_default_instance_;
class VarType_LoDTensorArrayDescDefaultTypeInternal { class VarType_LoDTensorArrayDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed< ::google::protobuf::internal::ExplicitlyConstructed<
VarType_LoDTensorArrayDesc> VarType_LoDTensorArrayDesc>
_instance; _instance;
} _VarType_LoDTensorArrayDesc_default_instance_; } _VarType_LoDTensorArrayDesc_default_instance_;
class VarType_ReaderDescDefaultTypeInternal { class VarType_ReaderDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType_ReaderDesc> ::google::protobuf::internal::ExplicitlyConstructed<VarType_ReaderDesc>
_instance; _instance;
} _VarType_ReaderDesc_default_instance_; } _VarType_ReaderDesc_default_instance_;
class VarType_ChannelDescDefaultTypeInternal { class VarType_ChannelDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType_ChannelDesc> ::google::protobuf::internal::ExplicitlyConstructed<VarType_ChannelDesc>
_instance; _instance;
} _VarType_ChannelDesc_default_instance_; } _VarType_ChannelDesc_default_instance_;
class VarType_TupleDefaultTypeInternal { class VarType_TupleDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType_Tuple> ::google::protobuf::internal::ExplicitlyConstructed<VarType_Tuple> _instance;
_instance;
} _VarType_Tuple_default_instance_; } _VarType_Tuple_default_instance_;
class VarTypeDefaultTypeInternal { class VarTypeDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarType> _instance; ::google::protobuf::internal::ExplicitlyConstructed<VarType> _instance;
} _VarType_default_instance_; } _VarType_default_instance_;
class VarDescDefaultTypeInternal { class VarDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<VarDesc> _instance; ::google::protobuf::internal::ExplicitlyConstructed<VarDesc> _instance;
} _VarDesc_default_instance_; } _VarDesc_default_instance_;
class BlockDescDefaultTypeInternal { class BlockDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<BlockDesc> _instance; ::google::protobuf::internal::ExplicitlyConstructed<BlockDesc> _instance;
} _BlockDesc_default_instance_; } _BlockDesc_default_instance_;
class ProgramDescDefaultTypeInternal { class ProgramDescDefaultTypeInternal {
public: public:
::google::protobuf::internal::ExplicitlyConstructed<ProgramDesc> _instance; ::google::protobuf::internal::ExplicitlyConstructed<ProgramDesc> _instance;
} _ProgramDesc_default_instance_; } _ProgramDesc_default_instance_;
...@@ -207,10 +206,9 @@ void TableStruct::InitDefaultsImpl() { ...@@ -207,10 +206,9 @@ void TableStruct::InitDefaultsImpl() {
const_cast<::paddle_mobile::framework::proto::VarType_Tuple *>( const_cast<::paddle_mobile::framework::proto::VarType_Tuple *>(
::paddle_mobile::framework::proto::VarType_Tuple:: ::paddle_mobile::framework::proto::VarType_Tuple::
internal_default_instance()); internal_default_instance());
_VarDesc_default_instance_._instance.get_mutable()->type_ = _VarDesc_default_instance_._instance.get_mutable()
const_cast<::paddle_mobile::framework::proto::VarType *>( ->type_ = const_cast<::paddle_mobile::framework::proto::VarType *>(
::paddle_mobile::framework::proto::VarType:: ::paddle_mobile::framework::proto::VarType::internal_default_instance());
internal_default_instance());
} }
void InitDefaults() { void InitDefaults() {
...@@ -338,8 +336,7 @@ OpDesc_Attr::OpDesc_Attr(const OpDesc_Attr &from) ...@@ -338,8 +336,7 @@ OpDesc_Attr::OpDesc_Attr(const OpDesc_Attr &from)
&::google::protobuf::internal::GetEmptyStringAlreadyInited()); &::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.has_s()) { if (from.has_s()) {
s_.AssignWithDefault( s_.AssignWithDefault(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), &::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.s_);
from.s_);
} }
::memcpy(&type_, &from.type_, ::memcpy(&type_, &from.type_,
static_cast<size_t>(reinterpret_cast<char *>(&block_idx_) - static_cast<size_t>(reinterpret_cast<char *>(&block_idx_) -
...@@ -443,8 +440,8 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -443,8 +440,8 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string name = 1; // required string name = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -463,15 +460,12 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -463,15 +460,12 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::AttrType_IsValid(value)) {
if (::paddle_mobile::framework::proto::AttrType_IsValid( set_type(
value)) { static_cast<::paddle_mobile::framework::proto::AttrType>(value));
set_type(static_cast<
::paddle_mobile::framework::proto::AttrType>(
value));
} else { } else {
unknown_fields_stream.WriteVarint32(16u); unknown_fields_stream.WriteVarint32(16u);
unknown_fields_stream.WriteVarint32( unknown_fields_stream.WriteVarint32(
...@@ -488,11 +482,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -488,11 +482,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) { static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) {
set_has_i(); set_has_i();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(input,
TYPE_INT32>(input, &i_))); &i_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -504,10 +497,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -504,10 +497,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(37u /* 37 & 0xFF */)) { static_cast<::google::protobuf::uint8>(37u /* 37 & 0xFF */)) {
set_has_f(); set_has_f();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
float, ::google::protobuf::internal::WireFormatLite:: input, &f_)));
TYPE_FLOAT>(input, &f_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -530,19 +522,18 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -530,19 +522,18 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case 6: { case 6: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(48u /* 48 & 0xFF */)) { static_cast<::google::protobuf::uint8>(48u /* 48 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((
ReadRepeatedPrimitive<::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitive<
::google::protobuf::internal:: ::google::protobuf::int32,
WireFormatLite::TYPE_INT32>( ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
1, 48u, input, this->mutable_ints()))); 1, 48u, input, this->mutable_ints())));
} else if (static_cast<::google::protobuf::uint8>(tag) == } else if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>( static_cast<::google::protobuf::uint8>(50u /* 50 & 0xFF */)) {
50u /* 50 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((::google::protobuf::internal::WireFormatLite::
ReadPackedPrimitiveNoInline< ReadPackedPrimitiveNoInline<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, this->mutable_ints()))); input, this->mutable_ints())));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -553,18 +544,17 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -553,18 +544,17 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case 7: { case 7: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(61u /* 61 & 0xFF */)) { static_cast<::google::protobuf::uint8>(61u /* 61 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((
ReadRepeatedPrimitive<float, ::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitive<
::google::protobuf::internal:: float,
WireFormatLite::TYPE_FLOAT>( ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
1, 61u, input, this->mutable_floats()))); 1, 61u, input, this->mutable_floats())));
} else if (static_cast<::google::protobuf::uint8>(tag) == } else if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>( static_cast<::google::protobuf::uint8>(58u /* 58 & 0xFF */)) {
58u /* 58 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((::google::protobuf::internal::WireFormatLite::
ReadPackedPrimitiveNoInline< ReadPackedPrimitiveNoInline<
float, ::google::protobuf::internal:: float,
WireFormatLite::TYPE_FLOAT>( ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
input, this->mutable_floats()))); input, this->mutable_floats())));
} else { } else {
goto handle_unusual; goto handle_unusual;
...@@ -589,10 +579,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -589,10 +579,9 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(80u /* 80 & 0xFF */)) { static_cast<::google::protobuf::uint8>(80u /* 80 & 0xFF */)) {
set_has_b(); set_has_b();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &b_)));
TYPE_BOOL>(input, &b_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -603,18 +592,16 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -603,18 +592,16 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
case 11: { case 11: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(88u /* 88 & 0xFF */)) { static_cast<::google::protobuf::uint8>(88u /* 88 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((
ReadRepeatedPrimitive<bool, ::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitive<
::google::protobuf::internal:: bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
WireFormatLite::TYPE_BOOL>(
1, 88u, input, this->mutable_bools()))); 1, 88u, input, this->mutable_bools())));
} else if (static_cast<::google::protobuf::uint8>(tag) == } else if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>( static_cast<::google::protobuf::uint8>(90u /* 90 & 0xFF */)) {
90u /* 90 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((::google::protobuf::internal::WireFormatLite::
ReadPackedPrimitiveNoInline< ReadPackedPrimitiveNoInline<
bool, ::google::protobuf::internal:: bool,
WireFormatLite::TYPE_BOOL>( ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
input, this->mutable_bools()))); input, this->mutable_bools())));
} else { } else {
goto handle_unusual; goto handle_unusual;
...@@ -627,11 +614,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -627,11 +614,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(96u /* 96 & 0xFF */)) { static_cast<::google::protobuf::uint8>(96u /* 96 & 0xFF */)) {
set_has_block_idx(); set_has_block_idx();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, &block_idx_))); input, &block_idx_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -643,11 +629,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream( ...@@ -643,11 +629,10 @@ bool OpDesc_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(104u /* 104 & 0xFF */)) { static_cast<::google::protobuf::uint8>(104u /* 104 & 0xFF */)) {
set_has_l(); set_has_l();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int64, ::google::protobuf::int64,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(input,
TYPE_INT64>(input, &l_))); &l_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -713,14 +698,14 @@ void OpDesc_Attr::SerializeWithCachedSizes( ...@@ -713,14 +698,14 @@ void OpDesc_Attr::SerializeWithCachedSizes(
// repeated int32 ints = 6; // repeated int32 ints = 6;
for (int i = 0, n = this->ints_size(); i < n; i++) { for (int i = 0, n = this->ints_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteInt32( ::google::protobuf::internal::WireFormatLite::WriteInt32(6, this->ints(i),
6, this->ints(i), output); output);
} }
// repeated float floats = 7; // repeated float floats = 7;
for (int i = 0, n = this->floats_size(); i < n; i++) { for (int i = 0, n = this->floats_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteFloat( ::google::protobuf::internal::WireFormatLite::WriteFloat(7, this->floats(i),
7, this->floats(i), output); output);
} }
// repeated string strings = 8; // repeated string strings = 8;
...@@ -737,8 +722,8 @@ void OpDesc_Attr::SerializeWithCachedSizes( ...@@ -737,8 +722,8 @@ void OpDesc_Attr::SerializeWithCachedSizes(
// repeated bool bools = 11; // repeated bool bools = 11;
for (int i = 0, n = this->bools_size(); i < n; i++) { for (int i = 0, n = this->bools_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteBool( ::google::protobuf::internal::WireFormatLite::WriteBool(11, this->bools(i),
11, this->bools(i), output); output);
} }
// optional int32 block_idx = 12; // optional int32 block_idx = 12;
...@@ -765,16 +750,14 @@ size_t OpDesc_Attr::RequiredFieldsByteSizeFallback() const { ...@@ -765,16 +750,14 @@ size_t OpDesc_Attr::RequiredFieldsByteSizeFallback() const {
if (has_name()) { if (has_name()) {
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
} }
if (has_type()) { if (has_type()) {
// required .paddle_mobile.framework.proto.AttrType type = // required .paddle_mobile.framework.proto.AttrType type =
// 2; // 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->type()); this->type());
} }
...@@ -789,14 +772,12 @@ size_t OpDesc_Attr::ByteSizeLong() const { ...@@ -789,14 +772,12 @@ size_t OpDesc_Attr::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000005) ^ 0x00000005) == if (((_has_bits_[0] & 0x00000005) ^ 0x00000005) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
// required .paddle_mobile.framework.proto.AttrType type = // required .paddle_mobile.framework.proto.AttrType type =
// 2; // 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->type()); this->type());
} else { } else {
...@@ -805,8 +786,7 @@ size_t OpDesc_Attr::ByteSizeLong() const { ...@@ -805,8 +786,7 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// repeated int32 ints = 6; // repeated int32 ints = 6;
{ {
size_t data_size = size_t data_size =
::google::protobuf::internal::WireFormatLite::Int32Size( ::google::protobuf::internal::WireFormatLite::Int32Size(this->ints_);
this->ints_);
total_size += total_size +=
1 * ::google::protobuf::internal::FromIntSize(this->ints_size()); 1 * ::google::protobuf::internal::FromIntSize(this->ints_size());
total_size += data_size; total_size += data_size;
...@@ -841,15 +821,13 @@ size_t OpDesc_Attr::ByteSizeLong() const { ...@@ -841,15 +821,13 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// optional string s = 5; // optional string s = 5;
if (has_s()) { if (has_s()) {
total_size += total_size +=
1 + 1 + ::google::protobuf::internal::WireFormatLite::StringSize(this->s());
::google::protobuf::internal::WireFormatLite::StringSize(this->s());
} }
if (_has_bits_[0 / 32] & 248u) { if (_has_bits_[0 / 32] & 248u) {
// optional int32 i = 3; // optional int32 i = 3;
if (has_i()) { if (has_i()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->i()); this->i());
} }
...@@ -865,15 +843,13 @@ size_t OpDesc_Attr::ByteSizeLong() const { ...@@ -865,15 +843,13 @@ size_t OpDesc_Attr::ByteSizeLong() const {
// optional int64 l = 13; // optional int64 l = 13;
if (has_l()) { if (has_l()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
this->l()); this->l());
} }
// optional int32 block_idx = 12; // optional int32 block_idx = 12;
if (has_block_idx()) { if (has_block_idx()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->block_idx()); this->block_idx());
} }
} }
...@@ -998,15 +974,14 @@ const ::std::string &OpDesc_Attr::name() const { ...@@ -998,15 +974,14 @@ const ::std::string &OpDesc_Attr::name() const {
} }
void OpDesc_Attr::set_name(const ::std::string &value) { void OpDesc_Attr::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
#if LANG_CXX11 #if LANG_CXX11
void OpDesc_Attr::set_name(::std::string &&value) { void OpDesc_Attr::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
...@@ -1014,15 +989,13 @@ void OpDesc_Attr::set_name(::std::string &&value) { ...@@ -1014,15 +989,13 @@ void OpDesc_Attr::set_name(::std::string &&value) {
void OpDesc_Attr::set_name(const char *value) { void OpDesc_Attr::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
void OpDesc_Attr::set_name(const char *value, size_t size) { void OpDesc_Attr::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
...@@ -1246,8 +1219,7 @@ void OpDesc_Attr::set_strings(int index, const char *value) { ...@@ -1246,8 +1219,7 @@ void OpDesc_Attr::set_strings(int index, const char *value) {
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.strings) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.strings)
} }
void OpDesc_Attr::set_strings(int index, const char *value, size_t size) { void OpDesc_Attr::set_strings(int index, const char *value, size_t size) {
strings_.Mutable(index)->assign(reinterpret_cast<const char *>(value), strings_.Mutable(index)->assign(reinterpret_cast<const char *>(value), size);
size);
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings)
} }
::std::string *OpDesc_Attr::add_strings() { ::std::string *OpDesc_Attr::add_strings() {
...@@ -1466,8 +1438,8 @@ bool OpDesc_Var::MergePartialFromCodedStream( ...@@ -1466,8 +1438,8 @@ bool OpDesc_Var::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string parameter = 1; // required string parameter = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -1545,8 +1517,7 @@ size_t OpDesc_Var::ByteSizeLong() const { ...@@ -1545,8 +1517,7 @@ size_t OpDesc_Var::ByteSizeLong() const {
// required string parameter = 1; // required string parameter = 1;
if (has_parameter()) { if (has_parameter()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->parameter()); this->parameter());
} }
// repeated string arguments = 2; // repeated string arguments = 2;
...@@ -1684,8 +1655,7 @@ void OpDesc_Var::set_allocated_parameter(::std::string *parameter) { ...@@ -1684,8 +1655,7 @@ void OpDesc_Var::set_allocated_parameter(::std::string *parameter) {
clear_has_parameter(); clear_has_parameter();
} }
parameter_.SetAllocatedNoArena( parameter_.SetAllocatedNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), &::google::protobuf::internal::GetEmptyStringAlreadyInited(), parameter);
parameter);
// @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter) // @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter)
} }
...@@ -1864,15 +1834,15 @@ bool OpDesc::MergePartialFromCodedStream( ...@@ -1864,15 +1834,15 @@ bool OpDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .paddle_mobile.framework.proto.OpDesc.Var inputs // repeated .paddle_mobile.framework.proto.OpDesc.Var inputs
// = 1; // = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_inputs())); input, add_inputs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -1884,8 +1854,8 @@ bool OpDesc::MergePartialFromCodedStream( ...@@ -1884,8 +1854,8 @@ bool OpDesc::MergePartialFromCodedStream(
case 2: { case 2: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_outputs())); input, add_outputs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -1909,8 +1879,8 @@ bool OpDesc::MergePartialFromCodedStream( ...@@ -1909,8 +1879,8 @@ bool OpDesc::MergePartialFromCodedStream(
case 4: { case 4: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) { static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_attrs())); input, add_attrs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -1922,10 +1892,9 @@ bool OpDesc::MergePartialFromCodedStream( ...@@ -1922,10 +1892,9 @@ bool OpDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) { static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) {
set_has_is_target(); set_has_is_target();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &is_target_)));
TYPE_BOOL>(input, &is_target_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -1968,8 +1937,7 @@ void OpDesc::SerializeWithCachedSizes( ...@@ -1968,8 +1937,7 @@ void OpDesc::SerializeWithCachedSizes(
// repeated .paddle_mobile.framework.proto.OpDesc.Var outputs = // repeated .paddle_mobile.framework.proto.OpDesc.Var outputs =
// 2; // 2;
for (unsigned int i = 0, for (unsigned int i = 0, n = static_cast<unsigned int>(this->outputs_size());
n = static_cast<unsigned int>(this->outputs_size());
i < n; i++) { i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage( ::google::protobuf::internal::WireFormatLite::WriteMessage(
2, this->outputs(static_cast<int>(i)), output); 2, this->outputs(static_cast<int>(i)), output);
...@@ -2010,8 +1978,7 @@ size_t OpDesc::ByteSizeLong() const { ...@@ -2010,8 +1978,7 @@ size_t OpDesc::ByteSizeLong() const {
// required string type = 3; // required string type = 3;
if (has_type()) { if (has_type()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->type()); this->type());
} }
// repeated .paddle_mobile.framework.proto.OpDesc.Var inputs = // repeated .paddle_mobile.framework.proto.OpDesc.Var inputs =
...@@ -2020,8 +1987,9 @@ size_t OpDesc::ByteSizeLong() const { ...@@ -2020,8 +1987,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->inputs_size()); unsigned int count = static_cast<unsigned int>(this->inputs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->inputs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->inputs(static_cast<int>(i)));
} }
} }
...@@ -2031,8 +1999,9 @@ size_t OpDesc::ByteSizeLong() const { ...@@ -2031,8 +1999,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->outputs_size()); unsigned int count = static_cast<unsigned int>(this->outputs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->outputs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->outputs(static_cast<int>(i)));
} }
} }
...@@ -2042,8 +2011,9 @@ size_t OpDesc::ByteSizeLong() const { ...@@ -2042,8 +2011,9 @@ size_t OpDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->attrs_size()); unsigned int count = static_cast<unsigned int>(this->attrs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->attrs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->attrs(static_cast<int>(i)));
} }
} }
...@@ -2148,15 +2118,14 @@ const ::std::string &OpDesc::type() const { ...@@ -2148,15 +2118,14 @@ const ::std::string &OpDesc::type() const {
} }
void OpDesc::set_type(const ::std::string &value) { void OpDesc::set_type(const ::std::string &value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type)
} }
#if LANG_CXX11 #if LANG_CXX11
void OpDesc::set_type(::std::string &&value) { void OpDesc::set_type(::std::string &&value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type)
} }
...@@ -2164,15 +2133,13 @@ void OpDesc::set_type(::std::string &&value) { ...@@ -2164,15 +2133,13 @@ void OpDesc::set_type(::std::string &&value) {
void OpDesc::set_type(const char *value) { void OpDesc::set_type(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type)
} }
void OpDesc::set_type(const char *value, size_t size) { void OpDesc::set_type(const char *value, size_t size) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type)
} }
...@@ -2445,8 +2412,8 @@ bool OpProto_Var::MergePartialFromCodedStream( ...@@ -2445,8 +2412,8 @@ bool OpProto_Var::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string name = 1; // required string name = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -2476,10 +2443,9 @@ bool OpProto_Var::MergePartialFromCodedStream( ...@@ -2476,10 +2443,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) { static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) {
set_has_duplicable(); set_has_duplicable();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &duplicable_)));
TYPE_BOOL>(input, &duplicable_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -2491,10 +2457,9 @@ bool OpProto_Var::MergePartialFromCodedStream( ...@@ -2491,10 +2457,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(32u /* 32 & 0xFF */)) { static_cast<::google::protobuf::uint8>(32u /* 32 & 0xFF */)) {
set_has_intermediate(); set_has_intermediate();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &intermediate_)));
TYPE_BOOL>(input, &intermediate_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -2506,10 +2471,9 @@ bool OpProto_Var::MergePartialFromCodedStream( ...@@ -2506,10 +2471,9 @@ bool OpProto_Var::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) { static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) {
set_has_dispensable(); set_has_dispensable();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &dispensable_)));
TYPE_BOOL>(input, &dispensable_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -2585,15 +2549,13 @@ size_t OpProto_Var::RequiredFieldsByteSizeFallback() const { ...@@ -2585,15 +2549,13 @@ size_t OpProto_Var::RequiredFieldsByteSizeFallback() const {
if (has_name()) { if (has_name()) {
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
} }
if (has_comment()) { if (has_comment()) {
// required string comment = 2; // required string comment = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
} }
...@@ -2608,13 +2570,11 @@ size_t OpProto_Var::ByteSizeLong() const { ...@@ -2608,13 +2570,11 @@ size_t OpProto_Var::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) == if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
// required string comment = 2; // required string comment = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
} else { } else {
...@@ -2737,15 +2697,14 @@ const ::std::string &OpProto_Var::name() const { ...@@ -2737,15 +2697,14 @@ const ::std::string &OpProto_Var::name() const {
} }
void OpProto_Var::set_name(const ::std::string &value) { void OpProto_Var::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name)
} }
#if LANG_CXX11 #if LANG_CXX11
void OpProto_Var::set_name(::std::string &&value) { void OpProto_Var::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name)
} }
...@@ -2753,15 +2712,13 @@ void OpProto_Var::set_name(::std::string &&value) { ...@@ -2753,15 +2712,13 @@ void OpProto_Var::set_name(::std::string &&value) {
void OpProto_Var::set_name(const char *value) { void OpProto_Var::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name)
} }
void OpProto_Var::set_name(const char *value, size_t size) { void OpProto_Var::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name)
} }
...@@ -3051,8 +3008,8 @@ bool OpProto_Attr::MergePartialFromCodedStream( ...@@ -3051,8 +3008,8 @@ bool OpProto_Attr::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string name = 1; // required string name = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -3071,15 +3028,12 @@ bool OpProto_Attr::MergePartialFromCodedStream( ...@@ -3071,15 +3028,12 @@ bool OpProto_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::AttrType_IsValid(value)) {
if (::paddle_mobile::framework::proto::AttrType_IsValid( set_type(
value)) { static_cast<::paddle_mobile::framework::proto::AttrType>(value));
set_type(static_cast<
::paddle_mobile::framework::proto::AttrType>(
value));
} else { } else {
unknown_fields_stream.WriteVarint32(16u); unknown_fields_stream.WriteVarint32(16u);
unknown_fields_stream.WriteVarint32( unknown_fields_stream.WriteVarint32(
...@@ -3108,10 +3062,9 @@ bool OpProto_Attr::MergePartialFromCodedStream( ...@@ -3108,10 +3062,9 @@ bool OpProto_Attr::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(32u /* 32 & 0xFF */)) { static_cast<::google::protobuf::uint8>(32u /* 32 & 0xFF */)) {
set_has_generated(); set_has_generated();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &generated_)));
TYPE_BOOL>(input, &generated_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -3181,23 +3134,20 @@ size_t OpProto_Attr::RequiredFieldsByteSizeFallback() const { ...@@ -3181,23 +3134,20 @@ size_t OpProto_Attr::RequiredFieldsByteSizeFallback() const {
if (has_name()) { if (has_name()) {
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
} }
if (has_comment()) { if (has_comment()) {
// required string comment = 3; // required string comment = 3;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
} }
if (has_type()) { if (has_type()) {
// required .paddle_mobile.framework.proto.AttrType type = // required .paddle_mobile.framework.proto.AttrType type =
// 2; // 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->type()); this->type());
} }
...@@ -3212,19 +3162,16 @@ size_t OpProto_Attr::ByteSizeLong() const { ...@@ -3212,19 +3162,16 @@ size_t OpProto_Attr::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000007) ^ 0x00000007) == if (((_has_bits_[0] & 0x00000007) ^ 0x00000007) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
// required string comment = 3; // required string comment = 3;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
// required .paddle_mobile.framework.proto.AttrType type = // required .paddle_mobile.framework.proto.AttrType type =
// 2; // 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->type()); this->type());
} else { } else {
...@@ -3332,15 +3279,14 @@ const ::std::string &OpProto_Attr::name() const { ...@@ -3332,15 +3279,14 @@ const ::std::string &OpProto_Attr::name() const {
} }
void OpProto_Attr::set_name(const ::std::string &value) { void OpProto_Attr::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
#if LANG_CXX11 #if LANG_CXX11
void OpProto_Attr::set_name(::std::string &&value) { void OpProto_Attr::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
...@@ -3348,15 +3294,13 @@ void OpProto_Attr::set_name(::std::string &&value) { ...@@ -3348,15 +3294,13 @@ void OpProto_Attr::set_name(::std::string &&value) {
void OpProto_Attr::set_name(const char *value) { void OpProto_Attr::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
void OpProto_Attr::set_name(const char *value, size_t size) { void OpProto_Attr::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
...@@ -3618,8 +3562,8 @@ bool OpProto::MergePartialFromCodedStream( ...@@ -3618,8 +3562,8 @@ bool OpProto::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string type = 1; // required string type = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -3637,8 +3581,8 @@ bool OpProto::MergePartialFromCodedStream( ...@@ -3637,8 +3581,8 @@ bool OpProto::MergePartialFromCodedStream(
case 2: { case 2: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_inputs())); input, add_inputs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -3650,8 +3594,8 @@ bool OpProto::MergePartialFromCodedStream( ...@@ -3650,8 +3594,8 @@ bool OpProto::MergePartialFromCodedStream(
case 3: { case 3: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_outputs())); input, add_outputs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -3663,8 +3607,8 @@ bool OpProto::MergePartialFromCodedStream( ...@@ -3663,8 +3607,8 @@ bool OpProto::MergePartialFromCodedStream(
case 4: { case 4: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) { static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_attrs())); input, add_attrs()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -3726,8 +3670,7 @@ void OpProto::SerializeWithCachedSizes( ...@@ -3726,8 +3670,7 @@ void OpProto::SerializeWithCachedSizes(
// repeated .paddle_mobile.framework.proto.OpProto.Var outputs = // repeated .paddle_mobile.framework.proto.OpProto.Var outputs =
// 3; // 3;
for (unsigned int i = 0, for (unsigned int i = 0, n = static_cast<unsigned int>(this->outputs_size());
n = static_cast<unsigned int>(this->outputs_size());
i < n; i++) { i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage( ::google::protobuf::internal::WireFormatLite::WriteMessage(
3, this->outputs(static_cast<int>(i)), output); 3, this->outputs(static_cast<int>(i)), output);
...@@ -3759,15 +3702,13 @@ size_t OpProto::RequiredFieldsByteSizeFallback() const { ...@@ -3759,15 +3702,13 @@ size_t OpProto::RequiredFieldsByteSizeFallback() const {
if (has_type()) { if (has_type()) {
// required string type = 1; // required string type = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->type()); this->type());
} }
if (has_comment()) { if (has_comment()) {
// required string comment = 5; // required string comment = 5;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
} }
...@@ -3782,13 +3723,11 @@ size_t OpProto::ByteSizeLong() const { ...@@ -3782,13 +3723,11 @@ size_t OpProto::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) == if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required string type = 1; // required string type = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->type()); this->type());
// required string comment = 5; // required string comment = 5;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->comment()); this->comment());
} else { } else {
...@@ -3800,8 +3739,9 @@ size_t OpProto::ByteSizeLong() const { ...@@ -3800,8 +3739,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->inputs_size()); unsigned int count = static_cast<unsigned int>(this->inputs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->inputs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->inputs(static_cast<int>(i)));
} }
} }
...@@ -3811,8 +3751,9 @@ size_t OpProto::ByteSizeLong() const { ...@@ -3811,8 +3751,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->outputs_size()); unsigned int count = static_cast<unsigned int>(this->outputs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->outputs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->outputs(static_cast<int>(i)));
} }
} }
...@@ -3822,8 +3763,9 @@ size_t OpProto::ByteSizeLong() const { ...@@ -3822,8 +3763,9 @@ size_t OpProto::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->attrs_size()); unsigned int count = static_cast<unsigned int>(this->attrs_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->attrs(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->attrs(static_cast<int>(i)));
} }
} }
...@@ -3925,15 +3867,14 @@ const ::std::string &OpProto::type() const { ...@@ -3925,15 +3867,14 @@ const ::std::string &OpProto::type() const {
} }
void OpProto::set_type(const ::std::string &value) { void OpProto::set_type(const ::std::string &value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type)
} }
#if LANG_CXX11 #if LANG_CXX11
void OpProto::set_type(::std::string &&value) { void OpProto::set_type(::std::string &&value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type)
} }
...@@ -3941,15 +3882,13 @@ void OpProto::set_type(::std::string &&value) { ...@@ -3941,15 +3882,13 @@ void OpProto::set_type(::std::string &&value) {
void OpProto::set_type(const char *value) { void OpProto::set_type(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type)
} }
void OpProto::set_type(const char *value, size_t size) { void OpProto::set_type(const char *value, size_t size) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type)
} }
...@@ -4220,23 +4159,20 @@ bool VarType_TensorDesc::MergePartialFromCodedStream( ...@@ -4220,23 +4159,20 @@ bool VarType_TensorDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::VarType_Type_IsValid(value)) {
if (::paddle_mobile::framework::proto::VarType_Type_IsValid(
value)) {
set_data_type( set_data_type(
static_cast< static_cast<::paddle_mobile::framework::proto::VarType_Type>(
::paddle_mobile::framework::proto::VarType_Type>(
value)); value));
} else { } else {
unknown_fields_stream.WriteVarint32(8u); unknown_fields_stream.WriteVarint32(8u);
...@@ -4253,19 +4189,18 @@ bool VarType_TensorDesc::MergePartialFromCodedStream( ...@@ -4253,19 +4189,18 @@ bool VarType_TensorDesc::MergePartialFromCodedStream(
case 2: { case 2: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((
ReadRepeatedPrimitive<::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitive<
::google::protobuf::internal:: ::google::protobuf::int64,
WireFormatLite::TYPE_INT64>( ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
1, 16u, input, this->mutable_dims()))); 1, 16u, input, this->mutable_dims())));
} else if (static_cast<::google::protobuf::uint8>(tag) == } else if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>( static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
18u /* 18 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((::google::protobuf::internal::WireFormatLite::
ReadPackedPrimitiveNoInline< ReadPackedPrimitiveNoInline<
::google::protobuf::int64, ::google::protobuf::int64,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
TYPE_INT64>(input, this->mutable_dims()))); input, this->mutable_dims())));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -4308,8 +4243,8 @@ void VarType_TensorDesc::SerializeWithCachedSizes( ...@@ -4308,8 +4243,8 @@ void VarType_TensorDesc::SerializeWithCachedSizes(
// repeated int64 dims = 2; // repeated int64 dims = 2;
for (int i = 0, n = this->dims_size(); i < n; i++) { for (int i = 0, n = this->dims_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteInt64( ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->dims(i),
2, this->dims(i), output); output);
} }
output->WriteRaw( output->WriteRaw(
...@@ -4327,15 +4262,13 @@ size_t VarType_TensorDesc::ByteSizeLong() const { ...@@ -4327,15 +4262,13 @@ size_t VarType_TensorDesc::ByteSizeLong() const {
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
if (has_data_type()) { if (has_data_type()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->data_type()); this->data_type());
} }
// repeated int64 dims = 2; // repeated int64 dims = 2;
{ {
size_t data_size = size_t data_size =
::google::protobuf::internal::WireFormatLite::Int64Size( ::google::protobuf::internal::WireFormatLite::Int64Size(this->dims_);
this->dims_);
total_size += total_size +=
1 * ::google::protobuf::internal::FromIntSize(this->dims_size()); 1 * ::google::protobuf::internal::FromIntSize(this->dims_size());
total_size += data_size; total_size += data_size;
...@@ -4350,8 +4283,7 @@ size_t VarType_TensorDesc::ByteSizeLong() const { ...@@ -4350,8 +4283,7 @@ size_t VarType_TensorDesc::ByteSizeLong() const {
void VarType_TensorDesc::CheckTypeAndMergeFrom( void VarType_TensorDesc::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite &from) { const ::google::protobuf::MessageLite &from) {
MergeFrom( MergeFrom(*::google::protobuf::down_cast<const VarType_TensorDesc *>(&from));
*::google::protobuf::down_cast<const VarType_TensorDesc *>(&from));
} }
void VarType_TensorDesc::MergeFrom(const VarType_TensorDesc &from) { void VarType_TensorDesc::MergeFrom(const VarType_TensorDesc &from) {
...@@ -4558,16 +4490,16 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream( ...@@ -4558,16 +4490,16 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required // required
// .paddle_mobile.framework.proto.VarType.TensorDesc tensor // .paddle_mobile.framework.proto.VarType.TensorDesc tensor
// = 1; // = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_tensor())); input, mutable_tensor()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -4579,11 +4511,10 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream( ...@@ -4579,11 +4511,10 @@ bool VarType_LoDTensorDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
set_has_lod_level(); set_has_lod_level();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, &lod_level_))); input, &lod_level_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -4646,14 +4577,12 @@ size_t VarType_LoDTensorDesc::ByteSizeLong() const { ...@@ -4646,14 +4577,12 @@ size_t VarType_LoDTensorDesc::ByteSizeLong() const {
// tensor = 1; // tensor = 1;
if (has_tensor()) { if (has_tensor()) {
total_size += total_size +=
1 + 1 + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tensor_); *this->tensor_);
} }
// optional int32 lod_level = 2 [default = 0]; // optional int32 lod_level = 2 [default = 0];
if (has_lod_level()) { if (has_lod_level()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->lod_level()); this->lod_level());
} }
...@@ -4680,8 +4609,9 @@ void VarType_LoDTensorDesc::MergeFrom(const VarType_LoDTensorDesc &from) { ...@@ -4680,8 +4609,9 @@ void VarType_LoDTensorDesc::MergeFrom(const VarType_LoDTensorDesc &from) {
cached_has_bits = from._has_bits_[0]; cached_has_bits = from._has_bits_[0];
if (cached_has_bits & 3u) { if (cached_has_bits & 3u) {
if (cached_has_bits & 0x00000001u) { if (cached_has_bits & 0x00000001u) {
mutable_tensor()->::paddle_mobile::framework::proto:: mutable_tensor()
VarType_TensorDesc::MergeFrom(from.tensor()); ->::paddle_mobile::framework::proto::VarType_TensorDesc::MergeFrom(
from.tensor());
} }
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
lod_level_ = from.lod_level_; lod_level_ = from.lod_level_;
...@@ -4912,16 +4842,16 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream( ...@@ -4912,16 +4842,16 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required // required
// .paddle_mobile.framework.proto.VarType.TensorDesc tensor // .paddle_mobile.framework.proto.VarType.TensorDesc tensor
// = 1; // = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_tensor())); input, mutable_tensor()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -4933,11 +4863,10 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream( ...@@ -4933,11 +4863,10 @@ bool VarType_LoDTensorArrayDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
set_has_lod_level(); set_has_lod_level();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, &lod_level_))); input, &lod_level_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -5000,14 +4929,12 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const { ...@@ -5000,14 +4929,12 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const {
// tensor = 1; // tensor = 1;
if (has_tensor()) { if (has_tensor()) {
total_size += total_size +=
1 + 1 + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tensor_); *this->tensor_);
} }
// optional int32 lod_level = 2 [default = 0]; // optional int32 lod_level = 2 [default = 0];
if (has_lod_level()) { if (has_lod_level()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->lod_level()); this->lod_level());
} }
...@@ -5020,8 +4947,7 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const { ...@@ -5020,8 +4947,7 @@ size_t VarType_LoDTensorArrayDesc::ByteSizeLong() const {
void VarType_LoDTensorArrayDesc::CheckTypeAndMergeFrom( void VarType_LoDTensorArrayDesc::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite &from) { const ::google::protobuf::MessageLite &from) {
MergeFrom( MergeFrom(*::google::protobuf::down_cast<const VarType_LoDTensorArrayDesc *>(
*::google::protobuf::down_cast<const VarType_LoDTensorArrayDesc *>(
&from)); &from));
} }
...@@ -5036,8 +4962,9 @@ void VarType_LoDTensorArrayDesc::MergeFrom( ...@@ -5036,8 +4962,9 @@ void VarType_LoDTensorArrayDesc::MergeFrom(
cached_has_bits = from._has_bits_[0]; cached_has_bits = from._has_bits_[0];
if (cached_has_bits & 3u) { if (cached_has_bits & 3u) {
if (cached_has_bits & 0x00000001u) { if (cached_has_bits & 0x00000001u) {
mutable_tensor()->::paddle_mobile::framework::proto:: mutable_tensor()
VarType_TensorDesc::MergeFrom(from.tensor()); ->::paddle_mobile::framework::proto::VarType_TensorDesc::MergeFrom(
from.tensor());
} }
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
lod_level_ = from.lod_level_; lod_level_ = from.lod_level_;
...@@ -5252,16 +5179,16 @@ bool VarType_ReaderDesc::MergePartialFromCodedStream( ...@@ -5252,16 +5179,16 @@ bool VarType_ReaderDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated // repeated
// .paddle_mobile.framework.proto.VarType.LoDTensorDesc // .paddle_mobile.framework.proto.VarType.LoDTensorDesc
// lod_tensor = 1; // lod_tensor = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_lod_tensor())); input, add_lod_tensor()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -5323,8 +5250,9 @@ size_t VarType_ReaderDesc::ByteSizeLong() const { ...@@ -5323,8 +5250,9 @@ size_t VarType_ReaderDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->lod_tensor_size()); unsigned int count = static_cast<unsigned int>(this->lod_tensor_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->lod_tensor(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->lod_tensor(static_cast<int>(i)));
} }
} }
...@@ -5337,8 +5265,7 @@ size_t VarType_ReaderDesc::ByteSizeLong() const { ...@@ -5337,8 +5265,7 @@ size_t VarType_ReaderDesc::ByteSizeLong() const {
void VarType_ReaderDesc::CheckTypeAndMergeFrom( void VarType_ReaderDesc::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite &from) { const ::google::protobuf::MessageLite &from) {
MergeFrom( MergeFrom(*::google::protobuf::down_cast<const VarType_ReaderDesc *>(&from));
*::google::protobuf::down_cast<const VarType_ReaderDesc *>(&from));
} }
void VarType_ReaderDesc::MergeFrom(const VarType_ReaderDesc &from) { void VarType_ReaderDesc::MergeFrom(const VarType_ReaderDesc &from) {
...@@ -5516,23 +5443,20 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream( ...@@ -5516,23 +5443,20 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::VarType_Type_IsValid(value)) {
if (::paddle_mobile::framework::proto::VarType_Type_IsValid(
value)) {
set_data_type( set_data_type(
static_cast< static_cast<::paddle_mobile::framework::proto::VarType_Type>(
::paddle_mobile::framework::proto::VarType_Type>(
value)); value));
} else { } else {
unknown_fields_stream.WriteVarint32(8u); unknown_fields_stream.WriteVarint32(8u);
...@@ -5550,11 +5474,10 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream( ...@@ -5550,11 +5474,10 @@ bool VarType_ChannelDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
set_has_capacity(); set_has_capacity();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int64, ::google::protobuf::int64,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
TYPE_INT64>(input, &capacity_))); input, &capacity_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -5613,16 +5536,14 @@ size_t VarType_ChannelDesc::RequiredFieldsByteSizeFallback() const { ...@@ -5613,16 +5536,14 @@ size_t VarType_ChannelDesc::RequiredFieldsByteSizeFallback() const {
if (has_capacity()) { if (has_capacity()) {
// required int64 capacity = 2; // required int64 capacity = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
this->capacity()); this->capacity());
} }
if (has_data_type()) { if (has_data_type()) {
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->data_type()); this->data_type());
} }
...@@ -5637,14 +5558,12 @@ size_t VarType_ChannelDesc::ByteSizeLong() const { ...@@ -5637,14 +5558,12 @@ size_t VarType_ChannelDesc::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) == if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required int64 capacity = 2; // required int64 capacity = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
1 + ::google::protobuf::internal::WireFormatLite::Int64Size(
this->capacity()); this->capacity());
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->data_type()); this->data_type());
} else { } else {
...@@ -5659,8 +5578,7 @@ size_t VarType_ChannelDesc::ByteSizeLong() const { ...@@ -5659,8 +5578,7 @@ size_t VarType_ChannelDesc::ByteSizeLong() const {
void VarType_ChannelDesc::CheckTypeAndMergeFrom( void VarType_ChannelDesc::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite &from) { const ::google::protobuf::MessageLite &from) {
MergeFrom( MergeFrom(*::google::protobuf::down_cast<const VarType_ChannelDesc *>(&from));
*::google::protobuf::down_cast<const VarType_ChannelDesc *>(&from));
} }
void VarType_ChannelDesc::MergeFrom(const VarType_ChannelDesc &from) { void VarType_ChannelDesc::MergeFrom(const VarType_ChannelDesc &from) {
...@@ -5848,23 +5766,20 @@ bool VarType_Tuple::MergePartialFromCodedStream( ...@@ -5848,23 +5766,20 @@ bool VarType_Tuple::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .paddle_mobile.framework.proto.VarType.Type // repeated .paddle_mobile.framework.proto.VarType.Type
// element_type = 1; // element_type = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::VarType_Type_IsValid(value)) {
if (::paddle_mobile::framework::proto::VarType_Type_IsValid(
value)) {
add_element_type( add_element_type(
static_cast< static_cast<::paddle_mobile::framework::proto::VarType_Type>(
::paddle_mobile::framework::proto::VarType_Type>(
value)); value));
} else { } else {
unknown_fields_stream.WriteVarint32(tag); unknown_fields_stream.WriteVarint32(tag);
...@@ -5872,15 +5787,12 @@ bool VarType_Tuple::MergePartialFromCodedStream( ...@@ -5872,15 +5787,12 @@ bool VarType_Tuple::MergePartialFromCodedStream(
static_cast<::google::protobuf::uint32>(value)); static_cast<::google::protobuf::uint32>(value));
} }
} else if (static_cast<::google::protobuf::uint8>(tag) == } else if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>( static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
10u /* 10 & 0xFF */)) {
DO_((::google::protobuf::internal::WireFormatLite:: DO_((::google::protobuf::internal::WireFormatLite::
ReadPackedEnumPreserveUnknowns( ReadPackedEnumPreserveUnknowns(
input, 1, input, 1,
::paddle_mobile::framework::proto:: ::paddle_mobile::framework::proto::VarType_Type_IsValid,
VarType_Type_IsValid, &unknown_fields_stream, this->mutable_element_type())));
&unknown_fields_stream,
this->mutable_element_type())));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -5936,8 +5848,7 @@ size_t VarType_Tuple::ByteSizeLong() const { ...@@ -5936,8 +5848,7 @@ size_t VarType_Tuple::ByteSizeLong() const {
// element_type = 1; // element_type = 1;
{ {
size_t data_size = 0; size_t data_size = 0;
unsigned int count = unsigned int count = static_cast<unsigned int>(this->element_type_size());
static_cast<unsigned int>(this->element_type_size());
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
data_size += ::google::protobuf::internal::WireFormatLite::EnumSize( data_size += ::google::protobuf::internal::WireFormatLite::EnumSize(
this->element_type(static_cast<int>(i))); this->element_type(static_cast<int>(i)));
...@@ -6056,15 +5967,13 @@ VarType::VarType(const VarType &from) ...@@ -6056,15 +5967,13 @@ VarType::VarType(const VarType &from)
_has_bits_(from._has_bits_), _cached_size_(0) { _has_bits_(from._has_bits_), _cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_); _internal_metadata_.MergeFrom(from._internal_metadata_);
if (from.has_selected_rows()) { if (from.has_selected_rows()) {
selected_rows_ = selected_rows_ = new ::paddle_mobile::framework::proto::VarType_TensorDesc(
new ::paddle_mobile::framework::proto::VarType_TensorDesc(
*from.selected_rows_); *from.selected_rows_);
} else { } else {
selected_rows_ = NULL; selected_rows_ = NULL;
} }
if (from.has_lod_tensor()) { if (from.has_lod_tensor()) {
lod_tensor_ = lod_tensor_ = new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc(
new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc(
*from.lod_tensor_); *from.lod_tensor_);
} else { } else {
lod_tensor_ = NULL; lod_tensor_ = NULL;
...@@ -6089,8 +5998,7 @@ VarType::VarType(const VarType &from) ...@@ -6089,8 +5998,7 @@ VarType::VarType(const VarType &from)
channel_ = NULL; channel_ = NULL;
} }
if (from.has_tuple()) { if (from.has_tuple()) {
tuple_ = tuple_ = new ::paddle_mobile::framework::proto::VarType_Tuple(*from.tuple_);
new ::paddle_mobile::framework::proto::VarType_Tuple(*from.tuple_);
} else { } else {
tuple_ = NULL; tuple_ = NULL;
} }
...@@ -6154,13 +6062,13 @@ void VarType::Clear() { ...@@ -6154,13 +6062,13 @@ void VarType::Clear() {
if (cached_has_bits & 63u) { if (cached_has_bits & 63u) {
if (cached_has_bits & 0x00000001u) { if (cached_has_bits & 0x00000001u) {
GOOGLE_DCHECK(selected_rows_ != NULL); GOOGLE_DCHECK(selected_rows_ != NULL);
selected_rows_->::paddle_mobile::framework::proto:: selected_rows_
VarType_TensorDesc::Clear(); ->::paddle_mobile::framework::proto::VarType_TensorDesc::Clear();
} }
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
GOOGLE_DCHECK(lod_tensor_ != NULL); GOOGLE_DCHECK(lod_tensor_ != NULL);
lod_tensor_->::paddle_mobile::framework::proto:: lod_tensor_
VarType_LoDTensorDesc::Clear(); ->::paddle_mobile::framework::proto::VarType_LoDTensorDesc::Clear();
} }
if (cached_has_bits & 0x00000004u) { if (cached_has_bits & 0x00000004u) {
GOOGLE_DCHECK(tensor_array_ != NULL); GOOGLE_DCHECK(tensor_array_ != NULL);
...@@ -6169,13 +6077,11 @@ void VarType::Clear() { ...@@ -6169,13 +6077,11 @@ void VarType::Clear() {
} }
if (cached_has_bits & 0x00000008u) { if (cached_has_bits & 0x00000008u) {
GOOGLE_DCHECK(reader_ != NULL); GOOGLE_DCHECK(reader_ != NULL);
reader_->::paddle_mobile::framework::proto::VarType_ReaderDesc:: reader_->::paddle_mobile::framework::proto::VarType_ReaderDesc::Clear();
Clear();
} }
if (cached_has_bits & 0x00000010u) { if (cached_has_bits & 0x00000010u) {
GOOGLE_DCHECK(channel_ != NULL); GOOGLE_DCHECK(channel_ != NULL);
channel_->::paddle_mobile::framework::proto::VarType_ChannelDesc:: channel_->::paddle_mobile::framework::proto::VarType_ChannelDesc::Clear();
Clear();
} }
if (cached_has_bits & 0x00000020u) { if (cached_has_bits & 0x00000020u) {
GOOGLE_DCHECK(tuple_ != NULL); GOOGLE_DCHECK(tuple_ != NULL);
...@@ -6207,22 +6113,19 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6207,22 +6113,19 @@ bool VarType::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required .paddle_mobile.framework.proto.VarType.Type type // required .paddle_mobile.framework.proto.VarType.Type type
// = 1; // = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
int value; int value;
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
int, ::google::protobuf::internal::WireFormatLite:: input, &value)));
TYPE_ENUM>(input, &value))); if (::paddle_mobile::framework::proto::VarType_Type_IsValid(value)) {
if (::paddle_mobile::framework::proto::VarType_Type_IsValid( set_type(static_cast<::paddle_mobile::framework::proto::VarType_Type>(
value)) {
set_type(static_cast<
::paddle_mobile::framework::proto::VarType_Type>(
value)); value));
} else { } else {
unknown_fields_stream.WriteVarint32(8u); unknown_fields_stream.WriteVarint32(8u);
...@@ -6241,8 +6144,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6241,8 +6144,8 @@ bool VarType::MergePartialFromCodedStream(
case 2: { case 2: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_selected_rows())); input, mutable_selected_rows()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6255,8 +6158,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6255,8 +6158,8 @@ bool VarType::MergePartialFromCodedStream(
case 3: { case 3: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_lod_tensor())); input, mutable_lod_tensor()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6269,8 +6172,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6269,8 +6172,8 @@ bool VarType::MergePartialFromCodedStream(
case 4: { case 4: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) { static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_tensor_array())); input, mutable_tensor_array()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6283,8 +6186,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6283,8 +6186,8 @@ bool VarType::MergePartialFromCodedStream(
case 5: { case 5: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(42u /* 42 & 0xFF */)) { static_cast<::google::protobuf::uint8>(42u /* 42 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_reader())); input, mutable_reader()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6298,8 +6201,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6298,8 +6201,8 @@ bool VarType::MergePartialFromCodedStream(
case 6: { case 6: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(50u /* 50 & 0xFF */)) { static_cast<::google::protobuf::uint8>(50u /* 50 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_channel())); input, mutable_channel()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6311,8 +6214,8 @@ bool VarType::MergePartialFromCodedStream( ...@@ -6311,8 +6214,8 @@ bool VarType::MergePartialFromCodedStream(
case 7: { case 7: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(58u /* 58 & 0xFF */)) { static_cast<::google::protobuf::uint8>(58u /* 58 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_tuple())); input, mutable_tuple()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -6394,8 +6297,8 @@ void VarType::SerializeWithCachedSizes( ...@@ -6394,8 +6297,8 @@ void VarType::SerializeWithCachedSizes(
// optional .paddle_mobile.framework.proto.VarType.Tuple tuple = // optional .paddle_mobile.framework.proto.VarType.Tuple tuple =
// 7; // 7;
if (cached_has_bits & 0x00000020u) { if (cached_has_bits & 0x00000020u) {
::google::protobuf::internal::WireFormatLite::WriteMessage( ::google::protobuf::internal::WireFormatLite::WriteMessage(7, *this->tuple_,
7, *this->tuple_, output); output);
} }
output->WriteRaw( output->WriteRaw(
...@@ -6413,8 +6316,7 @@ size_t VarType::ByteSizeLong() const { ...@@ -6413,8 +6316,7 @@ size_t VarType::ByteSizeLong() const {
// required .paddle_mobile.framework.proto.VarType.Type type = // required .paddle_mobile.framework.proto.VarType.Type type =
// 1; // 1;
if (has_type()) { if (has_type()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
1 + ::google::protobuf::internal::WireFormatLite::EnumSize(
this->type()); this->type());
} }
if (_has_bits_[0 / 32] & 63u) { if (_has_bits_[0 / 32] & 63u) {
...@@ -6423,8 +6325,10 @@ size_t VarType::ByteSizeLong() const { ...@@ -6423,8 +6325,10 @@ size_t VarType::ByteSizeLong() const {
// selected_rows // selected_rows
// = 2; // = 2;
if (has_selected_rows()) { if (has_selected_rows()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->selected_rows_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->selected_rows_);
} }
// optional // optional
...@@ -6432,39 +6336,49 @@ size_t VarType::ByteSizeLong() const { ...@@ -6432,39 +6336,49 @@ size_t VarType::ByteSizeLong() const {
// lod_tensor // lod_tensor
// = 3; // = 3;
if (has_lod_tensor()) { if (has_lod_tensor()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->lod_tensor_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->lod_tensor_);
} }
// optional // optional
// .paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc // .paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc
// tensor_array = 4; // tensor_array = 4;
if (has_tensor_array()) { if (has_tensor_array()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->tensor_array_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tensor_array_);
} }
// optional // optional
// .paddle_mobile.framework.proto.VarType.ReaderDesc reader // .paddle_mobile.framework.proto.VarType.ReaderDesc reader
// = 5; // = 5;
if (has_reader()) { if (has_reader()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->reader_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->reader_);
} }
// optional // optional
// .paddle_mobile.framework.proto.VarType.ChannelDesc // .paddle_mobile.framework.proto.VarType.ChannelDesc
// channel = 6; // channel = 6;
if (has_channel()) { if (has_channel()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->channel_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->channel_);
} }
// optional .paddle_mobile.framework.proto.VarType.Tuple // optional .paddle_mobile.framework.proto.VarType.Tuple
// tuple = 7; // tuple = 7;
if (has_tuple()) { if (has_tuple()) {
total_size += 1 + ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(*this->tuple_); 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tuple_);
} }
} }
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
...@@ -6489,24 +6403,28 @@ void VarType::MergeFrom(const VarType &from) { ...@@ -6489,24 +6403,28 @@ void VarType::MergeFrom(const VarType &from) {
cached_has_bits = from._has_bits_[0]; cached_has_bits = from._has_bits_[0];
if (cached_has_bits & 127u) { if (cached_has_bits & 127u) {
if (cached_has_bits & 0x00000001u) { if (cached_has_bits & 0x00000001u) {
mutable_selected_rows()->::paddle_mobile::framework::proto:: mutable_selected_rows()
VarType_TensorDesc::MergeFrom(from.selected_rows()); ->::paddle_mobile::framework::proto::VarType_TensorDesc::MergeFrom(
from.selected_rows());
} }
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
mutable_lod_tensor()->::paddle_mobile::framework::proto:: mutable_lod_tensor()
VarType_LoDTensorDesc::MergeFrom(from.lod_tensor()); ->::paddle_mobile::framework::proto::VarType_LoDTensorDesc::MergeFrom(
from.lod_tensor());
} }
if (cached_has_bits & 0x00000004u) { if (cached_has_bits & 0x00000004u) {
mutable_tensor_array()->::paddle_mobile::framework::proto:: mutable_tensor_array()->::paddle_mobile::framework::proto::
VarType_LoDTensorArrayDesc::MergeFrom(from.tensor_array()); VarType_LoDTensorArrayDesc::MergeFrom(from.tensor_array());
} }
if (cached_has_bits & 0x00000008u) { if (cached_has_bits & 0x00000008u) {
mutable_reader()->::paddle_mobile::framework::proto:: mutable_reader()
VarType_ReaderDesc::MergeFrom(from.reader()); ->::paddle_mobile::framework::proto::VarType_ReaderDesc::MergeFrom(
from.reader());
} }
if (cached_has_bits & 0x00000010u) { if (cached_has_bits & 0x00000010u) {
mutable_channel()->::paddle_mobile::framework::proto:: mutable_channel()
VarType_ChannelDesc::MergeFrom(from.channel()); ->::paddle_mobile::framework::proto::VarType_ChannelDesc::MergeFrom(
from.channel());
} }
if (cached_has_bits & 0x00000020u) { if (cached_has_bits & 0x00000020u) {
mutable_tuple() mutable_tuple()
...@@ -6628,8 +6546,7 @@ VarType::selected_rows() const { ...@@ -6628,8 +6546,7 @@ VarType::selected_rows() const {
VarType::mutable_selected_rows() { VarType::mutable_selected_rows() {
set_has_selected_rows(); set_has_selected_rows();
if (selected_rows_ == NULL) { if (selected_rows_ == NULL) {
selected_rows_ = selected_rows_ = new ::paddle_mobile::framework::proto::VarType_TensorDesc;
new ::paddle_mobile::framework::proto::VarType_TensorDesc;
} }
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows) // @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows)
return selected_rows_; return selected_rows_;
...@@ -6638,8 +6555,7 @@ VarType::mutable_selected_rows() { ...@@ -6638,8 +6555,7 @@ VarType::mutable_selected_rows() {
VarType::release_selected_rows() { VarType::release_selected_rows() {
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows) // @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows)
clear_has_selected_rows(); clear_has_selected_rows();
::paddle_mobile::framework::proto::VarType_TensorDesc *temp = ::paddle_mobile::framework::proto::VarType_TensorDesc *temp = selected_rows_;
selected_rows_;
selected_rows_ = NULL; selected_rows_ = NULL;
return temp; return temp;
} }
...@@ -6674,8 +6590,8 @@ VarType::lod_tensor() const { ...@@ -6674,8 +6590,8 @@ VarType::lod_tensor() const {
lod_tensor_; lod_tensor_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor)
return p != NULL ? *p return p != NULL ? *p
: *reinterpret_cast<const ::paddle_mobile::framework:: : *reinterpret_cast<const ::paddle_mobile::framework::proto::
proto::VarType_LoDTensorDesc *>( VarType_LoDTensorDesc *>(
&::paddle_mobile::framework::proto:: &::paddle_mobile::framework::proto::
_VarType_LoDTensorDesc_default_instance_); _VarType_LoDTensorDesc_default_instance_);
} }
...@@ -6683,8 +6599,7 @@ VarType::lod_tensor() const { ...@@ -6683,8 +6599,7 @@ VarType::lod_tensor() const {
VarType::mutable_lod_tensor() { VarType::mutable_lod_tensor() {
set_has_lod_tensor(); set_has_lod_tensor();
if (lod_tensor_ == NULL) { if (lod_tensor_ == NULL) {
lod_tensor_ = lod_tensor_ = new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc;
new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc;
} }
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor)
return lod_tensor_; return lod_tensor_;
...@@ -6693,8 +6608,7 @@ VarType::mutable_lod_tensor() { ...@@ -6693,8 +6608,7 @@ VarType::mutable_lod_tensor() {
VarType::release_lod_tensor() { VarType::release_lod_tensor() {
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor)
clear_has_lod_tensor(); clear_has_lod_tensor();
::paddle_mobile::framework::proto::VarType_LoDTensorDesc *temp = ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *temp = lod_tensor_;
lod_tensor_;
lod_tensor_ = NULL; lod_tensor_ = NULL;
return temp; return temp;
} }
...@@ -6729,8 +6643,7 @@ VarType::tensor_array() const { ...@@ -6729,8 +6643,7 @@ VarType::tensor_array() const {
const ::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc *p = const ::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc *p =
tensor_array_; tensor_array_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array)
return p != NULL return p != NULL ? *p
? *p
: *reinterpret_cast<const ::paddle_mobile::framework::proto:: : *reinterpret_cast<const ::paddle_mobile::framework::proto::
VarType_LoDTensorArrayDesc *>( VarType_LoDTensorArrayDesc *>(
&::paddle_mobile::framework::proto:: &::paddle_mobile::framework::proto::
...@@ -6825,18 +6738,18 @@ void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; } ...@@ -6825,18 +6738,18 @@ void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; }
void VarType::clear_has_channel() { _has_bits_[0] &= ~0x00000010u; } void VarType::clear_has_channel() { _has_bits_[0] &= ~0x00000010u; }
void VarType::clear_channel() { void VarType::clear_channel() {
if (channel_ != NULL) if (channel_ != NULL)
channel_ channel_->::paddle_mobile::framework::proto::VarType_ChannelDesc::Clear();
->::paddle_mobile::framework::proto::VarType_ChannelDesc::Clear();
clear_has_channel(); clear_has_channel();
} }
const ::paddle_mobile::framework::proto::VarType_ChannelDesc & const ::paddle_mobile::framework::proto::VarType_ChannelDesc &
VarType::channel() const { VarType::channel() const {
const ::paddle_mobile::framework::proto::VarType_ChannelDesc *p = channel_; const ::paddle_mobile::framework::proto::VarType_ChannelDesc *p = channel_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel)
return p != NULL ? *p return p != NULL
: *reinterpret_cast<const ::paddle_mobile::framework:: ? *p
proto::VarType_ChannelDesc *>( : *reinterpret_cast<
&::paddle_mobile::framework::proto:: const ::paddle_mobile::framework::proto::VarType_ChannelDesc
*>(&::paddle_mobile::framework::proto::
_VarType_ChannelDesc_default_instance_); _VarType_ChannelDesc_default_instance_);
} }
::paddle_mobile::framework::proto::VarType_ChannelDesc * ::paddle_mobile::framework::proto::VarType_ChannelDesc *
...@@ -7035,8 +6948,8 @@ bool VarDesc::MergePartialFromCodedStream( ...@@ -7035,8 +6948,8 @@ bool VarDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string name = 1; // required string name = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
...@@ -7053,8 +6966,8 @@ bool VarDesc::MergePartialFromCodedStream( ...@@ -7053,8 +6966,8 @@ bool VarDesc::MergePartialFromCodedStream(
case 2: { case 2: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { static_cast<::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, mutable_type())); input, mutable_type()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7066,10 +6979,9 @@ bool VarDesc::MergePartialFromCodedStream( ...@@ -7066,10 +6979,9 @@ bool VarDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) { static_cast<::google::protobuf::uint8>(24u /* 24 & 0xFF */)) {
set_has_persistable(); set_has_persistable();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
bool, ::google::protobuf::internal::WireFormatLite:: input, &persistable_)));
TYPE_BOOL>(input, &persistable_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7111,8 +7023,8 @@ void VarDesc::SerializeWithCachedSizes( ...@@ -7111,8 +7023,8 @@ void VarDesc::SerializeWithCachedSizes(
// required .paddle_mobile.framework.proto.VarType type = 2; // required .paddle_mobile.framework.proto.VarType type = 2;
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
::google::protobuf::internal::WireFormatLite::WriteMessage( ::google::protobuf::internal::WireFormatLite::WriteMessage(2, *this->type_,
2, *this->type_, output); output);
} }
// optional bool persistable = 3 [default = false]; // optional bool persistable = 3 [default = false];
...@@ -7133,16 +7045,14 @@ size_t VarDesc::RequiredFieldsByteSizeFallback() const { ...@@ -7133,16 +7045,14 @@ size_t VarDesc::RequiredFieldsByteSizeFallback() const {
if (has_name()) { if (has_name()) {
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
} }
if (has_type()) { if (has_type()) {
// required .paddle_mobile.framework.proto.VarType type = 2; // required .paddle_mobile.framework.proto.VarType type = 2;
total_size += total_size +=
1 + 1 + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->type_); *this->type_);
} }
...@@ -7157,14 +7067,12 @@ size_t VarDesc::ByteSizeLong() const { ...@@ -7157,14 +7067,12 @@ size_t VarDesc::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) == if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required string name = 1; // required string name = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize(
1 + ::google::protobuf::internal::WireFormatLite::StringSize(
this->name()); this->name());
// required .paddle_mobile.framework.proto.VarType type = 2; // required .paddle_mobile.framework.proto.VarType type = 2;
total_size += total_size +=
1 + 1 + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->type_); *this->type_);
} else { } else {
...@@ -7203,8 +7111,7 @@ void VarDesc::MergeFrom(const VarDesc &from) { ...@@ -7203,8 +7111,7 @@ void VarDesc::MergeFrom(const VarDesc &from) {
from.name_); from.name_);
} }
if (cached_has_bits & 0x00000002u) { if (cached_has_bits & 0x00000002u) {
mutable_type() mutable_type()->::paddle_mobile::framework::proto::VarType::MergeFrom(
->::paddle_mobile::framework::proto::VarType::MergeFrom(
from.type()); from.type());
} }
if (cached_has_bits & 0x00000004u) { if (cached_has_bits & 0x00000004u) {
...@@ -7269,15 +7176,14 @@ const ::std::string &VarDesc::name() const { ...@@ -7269,15 +7176,14 @@ const ::std::string &VarDesc::name() const {
} }
void VarDesc::set_name(const ::std::string &value) { void VarDesc::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name)
} }
#if LANG_CXX11 #if LANG_CXX11
void VarDesc::set_name(::std::string &&value) { void VarDesc::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name)
} }
...@@ -7285,15 +7191,13 @@ void VarDesc::set_name(::std::string &&value) { ...@@ -7285,15 +7191,13 @@ void VarDesc::set_name(::std::string &&value) {
void VarDesc::set_name(const char *value) { void VarDesc::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name)
} }
void VarDesc::set_name(const char *value, size_t size) { void VarDesc::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name)
} }
...@@ -7491,18 +7395,17 @@ bool BlockDesc::MergePartialFromCodedStream( ...@@ -7491,18 +7395,17 @@ bool BlockDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required int32 idx = 1; // required int32 idx = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { static_cast<::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
set_has_idx(); set_has_idx();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(input,
TYPE_INT32>(input, &idx_))); &idx_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7514,11 +7417,10 @@ bool BlockDesc::MergePartialFromCodedStream( ...@@ -7514,11 +7417,10 @@ bool BlockDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { static_cast<::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {
set_has_parent_idx(); set_has_parent_idx();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, &parent_idx_))); input, &parent_idx_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7529,8 +7431,8 @@ bool BlockDesc::MergePartialFromCodedStream( ...@@ -7529,8 +7431,8 @@ bool BlockDesc::MergePartialFromCodedStream(
case 3: { case 3: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { static_cast<::google::protobuf::uint8>(26u /* 26 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_vars())); input, add_vars()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7541,8 +7443,8 @@ bool BlockDesc::MergePartialFromCodedStream( ...@@ -7541,8 +7443,8 @@ bool BlockDesc::MergePartialFromCodedStream(
case 4: { case 4: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) { static_cast<::google::protobuf::uint8>(34u /* 34 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_ops())); input, add_ops()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7554,11 +7456,10 @@ bool BlockDesc::MergePartialFromCodedStream( ...@@ -7554,11 +7456,10 @@ bool BlockDesc::MergePartialFromCodedStream(
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) { static_cast<::google::protobuf::uint8>(40u /* 40 & 0xFF */)) {
set_has_forward_block_idx(); set_has_forward_block_idx();
DO_(( DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::int32, ::google::protobuf::int32,
::google::protobuf::internal::WireFormatLite:: ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
TYPE_INT32>(input, &forward_block_idx_))); input, &forward_block_idx_)));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -7636,15 +7537,13 @@ size_t BlockDesc::RequiredFieldsByteSizeFallback() const { ...@@ -7636,15 +7537,13 @@ size_t BlockDesc::RequiredFieldsByteSizeFallback() const {
if (has_idx()) { if (has_idx()) {
// required int32 idx = 1; // required int32 idx = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->idx()); this->idx());
} }
if (has_parent_idx()) { if (has_parent_idx()) {
// required int32 parent_idx = 2; // required int32 parent_idx = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->parent_idx()); this->parent_idx());
} }
...@@ -7659,13 +7558,11 @@ size_t BlockDesc::ByteSizeLong() const { ...@@ -7659,13 +7558,11 @@ size_t BlockDesc::ByteSizeLong() const {
if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) == if (((_has_bits_[0] & 0x00000003) ^ 0x00000003) ==
0) { // All required fields are present. 0) { // All required fields are present.
// required int32 idx = 1; // required int32 idx = 1;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->idx()); this->idx());
// required int32 parent_idx = 2; // required int32 parent_idx = 2;
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->parent_idx()); this->parent_idx());
} else { } else {
...@@ -7676,8 +7573,9 @@ size_t BlockDesc::ByteSizeLong() const { ...@@ -7676,8 +7573,9 @@ size_t BlockDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->vars_size()); unsigned int count = static_cast<unsigned int>(this->vars_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->vars(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->vars(static_cast<int>(i)));
} }
} }
...@@ -7686,15 +7584,15 @@ size_t BlockDesc::ByteSizeLong() const { ...@@ -7686,15 +7584,15 @@ size_t BlockDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->ops_size()); unsigned int count = static_cast<unsigned int>(this->ops_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->ops(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->ops(static_cast<int>(i)));
} }
} }
// optional int32 forward_block_idx = 5 [default = -1]; // optional int32 forward_block_idx = 5 [default = -1];
if (has_forward_block_idx()) { if (has_forward_block_idx()) {
total_size += total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
1 + ::google::protobuf::internal::WireFormatLite::Int32Size(
this->forward_block_idx()); this->forward_block_idx());
} }
...@@ -7971,15 +7869,15 @@ bool ProgramDesc::MergePartialFromCodedStream( ...@@ -7971,15 +7869,15 @@ bool ProgramDesc::MergePartialFromCodedStream(
tag = p.first; tag = p.first;
if (!p.second) if (!p.second)
goto handle_unusual; goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber( switch (
tag)) { ::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .paddle_mobile.framework.proto.BlockDesc blocks // repeated .paddle_mobile.framework.proto.BlockDesc blocks
// = 1; // = 1;
case 1: { case 1: {
if (static_cast<::google::protobuf::uint8>(tag) == if (static_cast<::google::protobuf::uint8>(tag) ==
static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { static_cast<::google::protobuf::uint8>(10u /* 10 & 0xFF */)) {
DO_(::google::protobuf::internal::WireFormatLite:: DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
ReadMessageNoVirtual(input, add_blocks())); input, add_blocks()));
} else { } else {
goto handle_unusual; goto handle_unusual;
} }
...@@ -8036,8 +7934,9 @@ size_t ProgramDesc::ByteSizeLong() const { ...@@ -8036,8 +7934,9 @@ size_t ProgramDesc::ByteSizeLong() const {
unsigned int count = static_cast<unsigned int>(this->blocks_size()); unsigned int count = static_cast<unsigned int>(this->blocks_size());
total_size += 1UL * count; total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) { for (unsigned int i = 0; i < count; i++) {
total_size += ::google::protobuf::internal::WireFormatLite:: total_size +=
MessageSizeNoVirtual(this->blocks(static_cast<int>(i))); ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->blocks(static_cast<int>(i)));
} }
} }
......
...@@ -160,7 +160,7 @@ class OpDesc_Attr ...@@ -160,7 +160,7 @@ class OpDesc_Attr
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Attr) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Attr)
*/ */
{ {
public: public:
OpDesc_Attr(); OpDesc_Attr();
virtual ~OpDesc_Attr(); virtual ~OpDesc_Attr();
...@@ -224,17 +224,17 @@ class OpDesc_Attr ...@@ -224,17 +224,17 @@ class OpDesc_Attr
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpDesc_Attr *other); void InternalSwap(OpDesc_Attr *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -252,8 +252,7 @@ class OpDesc_Attr ...@@ -252,8 +252,7 @@ class OpDesc_Attr
void add_ints(::google::protobuf::int32 value); void add_ints(::google::protobuf::int32 value);
const ::google::protobuf::RepeatedField<::google::protobuf::int32> & const ::google::protobuf::RepeatedField<::google::protobuf::int32> &
ints() const; ints() const;
::google::protobuf::RepeatedField<::google::protobuf::int32> * ::google::protobuf::RepeatedField<::google::protobuf::int32> *mutable_ints();
mutable_ints();
// repeated float floats = 7; // repeated float floats = 7;
int floats_size() const; int floats_size() const;
...@@ -370,7 +369,7 @@ class OpDesc_Attr ...@@ -370,7 +369,7 @@ class OpDesc_Attr
void set_block_idx(::google::protobuf::int32 value); void set_block_idx(::google::protobuf::int32 value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Attr) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Attr)
private: private:
void set_has_name(); void set_has_name();
void clear_has_name(); void clear_has_name();
void set_has_type(); void set_has_type();
...@@ -416,7 +415,7 @@ class OpDesc_Var ...@@ -416,7 +415,7 @@ class OpDesc_Var
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Var) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc.Var)
*/ */
{ {
public: public:
OpDesc_Var(); OpDesc_Var();
virtual ~OpDesc_Var(); virtual ~OpDesc_Var();
...@@ -451,8 +450,7 @@ class OpDesc_Var ...@@ -451,8 +450,7 @@ class OpDesc_Var
static const OpDesc_Var &default_instance(); static const OpDesc_Var &default_instance();
static inline const OpDesc_Var *internal_default_instance() { static inline const OpDesc_Var *internal_default_instance() {
return reinterpret_cast<const OpDesc_Var *>( return reinterpret_cast<const OpDesc_Var *>(&_OpDesc_Var_default_instance_);
&_OpDesc_Var_default_instance_);
} }
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 1; static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 1;
...@@ -480,17 +478,17 @@ class OpDesc_Var ...@@ -480,17 +478,17 @@ class OpDesc_Var
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpDesc_Var *other); void InternalSwap(OpDesc_Var *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -518,8 +516,7 @@ class OpDesc_Var ...@@ -518,8 +516,7 @@ class OpDesc_Var
#endif #endif
void add_arguments(const char *value); void add_arguments(const char *value);
void add_arguments(const char *value, size_t size); void add_arguments(const char *value, size_t size);
const ::google::protobuf::RepeatedPtrField<::std::string> & const ::google::protobuf::RepeatedPtrField<::std::string> &arguments() const;
arguments() const;
::google::protobuf::RepeatedPtrField<::std::string> *mutable_arguments(); ::google::protobuf::RepeatedPtrField<::std::string> *mutable_arguments();
// required string parameter = 1; // required string parameter = 1;
...@@ -538,7 +535,7 @@ class OpDesc_Var ...@@ -538,7 +535,7 @@ class OpDesc_Var
void set_allocated_parameter(::std::string *parameter); void set_allocated_parameter(::std::string *parameter);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Var) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc.Var)
private: private:
void set_has_parameter(); void set_has_parameter();
void clear_has_parameter(); void clear_has_parameter();
...@@ -557,7 +554,7 @@ class OpDesc ...@@ -557,7 +554,7 @@ class OpDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpDesc)
*/ */
{ {
public: public:
OpDesc(); OpDesc();
virtual ~OpDesc(); virtual ~OpDesc();
...@@ -618,17 +615,17 @@ class OpDesc ...@@ -618,17 +615,17 @@ class OpDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpDesc *other); void InternalSwap(OpDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -645,8 +642,7 @@ class OpDesc ...@@ -645,8 +642,7 @@ class OpDesc
int inputs_size() const; int inputs_size() const;
void clear_inputs(); void clear_inputs();
static const int kInputsFieldNumber = 1; static const int kInputsFieldNumber = 1;
const ::paddle_mobile::framework::proto::OpDesc_Var & const ::paddle_mobile::framework::proto::OpDesc_Var &inputs(int index) const;
inputs(int index) const;
::paddle_mobile::framework::proto::OpDesc_Var *mutable_inputs(int index); ::paddle_mobile::framework::proto::OpDesc_Var *mutable_inputs(int index);
::paddle_mobile::framework::proto::OpDesc_Var *add_inputs(); ::paddle_mobile::framework::proto::OpDesc_Var *add_inputs();
::google::protobuf::RepeatedPtrField< ::google::protobuf::RepeatedPtrField<
...@@ -661,8 +657,7 @@ class OpDesc ...@@ -661,8 +657,7 @@ class OpDesc
int outputs_size() const; int outputs_size() const;
void clear_outputs(); void clear_outputs();
static const int kOutputsFieldNumber = 2; static const int kOutputsFieldNumber = 2;
const ::paddle_mobile::framework::proto::OpDesc_Var & const ::paddle_mobile::framework::proto::OpDesc_Var &outputs(int index) const;
outputs(int index) const;
::paddle_mobile::framework::proto::OpDesc_Var *mutable_outputs(int index); ::paddle_mobile::framework::proto::OpDesc_Var *mutable_outputs(int index);
::paddle_mobile::framework::proto::OpDesc_Var *add_outputs(); ::paddle_mobile::framework::proto::OpDesc_Var *add_outputs();
::google::protobuf::RepeatedPtrField< ::google::protobuf::RepeatedPtrField<
...@@ -677,8 +672,7 @@ class OpDesc ...@@ -677,8 +672,7 @@ class OpDesc
int attrs_size() const; int attrs_size() const;
void clear_attrs(); void clear_attrs();
static const int kAttrsFieldNumber = 4; static const int kAttrsFieldNumber = 4;
const ::paddle_mobile::framework::proto::OpDesc_Attr & const ::paddle_mobile::framework::proto::OpDesc_Attr &attrs(int index) const;
attrs(int index) const;
::paddle_mobile::framework::proto::OpDesc_Attr *mutable_attrs(int index); ::paddle_mobile::framework::proto::OpDesc_Attr *mutable_attrs(int index);
::paddle_mobile::framework::proto::OpDesc_Attr *add_attrs(); ::paddle_mobile::framework::proto::OpDesc_Attr *add_attrs();
::google::protobuf::RepeatedPtrField< ::google::protobuf::RepeatedPtrField<
...@@ -711,7 +705,7 @@ class OpDesc ...@@ -711,7 +705,7 @@ class OpDesc
void set_is_target(bool value); void set_is_target(bool value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpDesc)
private: private:
void set_has_type(); void set_has_type();
void clear_has_type(); void clear_has_type();
void set_has_is_target(); void set_has_is_target();
...@@ -741,7 +735,7 @@ class OpProto_Var ...@@ -741,7 +735,7 @@ class OpProto_Var
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Var) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Var)
*/ */
{ {
public: public:
OpProto_Var(); OpProto_Var();
virtual ~OpProto_Var(); virtual ~OpProto_Var();
...@@ -805,17 +799,17 @@ class OpProto_Var ...@@ -805,17 +799,17 @@ class OpProto_Var
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpProto_Var *other); void InternalSwap(OpProto_Var *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -876,7 +870,7 @@ class OpProto_Var ...@@ -876,7 +870,7 @@ class OpProto_Var
void set_dispensable(bool value); void set_dispensable(bool value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Var) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Var)
private: private:
void set_has_name(); void set_has_name();
void clear_has_name(); void clear_has_name();
void set_has_comment(); void set_has_comment();
...@@ -909,7 +903,7 @@ class OpProto_Attr ...@@ -909,7 +903,7 @@ class OpProto_Attr
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Attr) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto.Attr)
*/ */
{ {
public: public:
OpProto_Attr(); OpProto_Attr();
virtual ~OpProto_Attr(); virtual ~OpProto_Attr();
...@@ -973,17 +967,17 @@ class OpProto_Attr ...@@ -973,17 +967,17 @@ class OpProto_Attr
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpProto_Attr *other); void InternalSwap(OpProto_Attr *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1037,7 +1031,7 @@ class OpProto_Attr ...@@ -1037,7 +1031,7 @@ class OpProto_Attr
void set_generated(bool value); void set_generated(bool value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Attr) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto.Attr)
private: private:
void set_has_name(); void set_has_name();
void clear_has_name(); void clear_has_name();
void set_has_type(); void set_has_type();
...@@ -1067,7 +1061,7 @@ class OpProto ...@@ -1067,7 +1061,7 @@ class OpProto
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.OpProto)
*/ */
{ {
public: public:
OpProto(); OpProto();
virtual ~OpProto(); virtual ~OpProto();
...@@ -1128,17 +1122,17 @@ class OpProto ...@@ -1128,17 +1122,17 @@ class OpProto
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(OpProto *other); void InternalSwap(OpProto *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1155,8 +1149,7 @@ class OpProto ...@@ -1155,8 +1149,7 @@ class OpProto
int inputs_size() const; int inputs_size() const;
void clear_inputs(); void clear_inputs();
static const int kInputsFieldNumber = 2; static const int kInputsFieldNumber = 2;
const ::paddle_mobile::framework::proto::OpProto_Var & const ::paddle_mobile::framework::proto::OpProto_Var &inputs(int index) const;
inputs(int index) const;
::paddle_mobile::framework::proto::OpProto_Var *mutable_inputs(int index); ::paddle_mobile::framework::proto::OpProto_Var *mutable_inputs(int index);
::paddle_mobile::framework::proto::OpProto_Var *add_inputs(); ::paddle_mobile::framework::proto::OpProto_Var *add_inputs();
::google::protobuf::RepeatedPtrField< ::google::protobuf::RepeatedPtrField<
...@@ -1187,8 +1180,7 @@ class OpProto ...@@ -1187,8 +1180,7 @@ class OpProto
int attrs_size() const; int attrs_size() const;
void clear_attrs(); void clear_attrs();
static const int kAttrsFieldNumber = 4; static const int kAttrsFieldNumber = 4;
const ::paddle_mobile::framework::proto::OpProto_Attr & const ::paddle_mobile::framework::proto::OpProto_Attr &attrs(int index) const;
attrs(int index) const;
::paddle_mobile::framework::proto::OpProto_Attr *mutable_attrs(int index); ::paddle_mobile::framework::proto::OpProto_Attr *mutable_attrs(int index);
::paddle_mobile::framework::proto::OpProto_Attr *add_attrs(); ::paddle_mobile::framework::proto::OpProto_Attr *add_attrs();
::google::protobuf::RepeatedPtrField< ::google::protobuf::RepeatedPtrField<
...@@ -1229,7 +1221,7 @@ class OpProto ...@@ -1229,7 +1221,7 @@ class OpProto
void set_allocated_comment(::std::string *comment); void set_allocated_comment(::std::string *comment);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.OpProto)
private: private:
void set_has_type(); void set_has_type();
void clear_has_type(); void clear_has_type();
void set_has_comment(); void set_has_comment();
...@@ -1262,7 +1254,7 @@ class VarType_TensorDesc ...@@ -1262,7 +1254,7 @@ class VarType_TensorDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.TensorDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.TensorDesc)
*/ */
{ {
public: public:
VarType_TensorDesc(); VarType_TensorDesc();
virtual ~VarType_TensorDesc(); virtual ~VarType_TensorDesc();
...@@ -1304,9 +1296,7 @@ class VarType_TensorDesc ...@@ -1304,9 +1296,7 @@ class VarType_TensorDesc
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 6; static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 6;
void Swap(VarType_TensorDesc *other); void Swap(VarType_TensorDesc *other);
friend void swap(VarType_TensorDesc &a, VarType_TensorDesc &b) { friend void swap(VarType_TensorDesc &a, VarType_TensorDesc &b) { a.Swap(&b); }
a.Swap(&b);
}
// implements Message // implements Message
// ---------------------------------------------- // ----------------------------------------------
...@@ -1330,17 +1320,17 @@ class VarType_TensorDesc ...@@ -1330,17 +1320,17 @@ class VarType_TensorDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_TensorDesc *other); void InternalSwap(VarType_TensorDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1358,8 +1348,7 @@ class VarType_TensorDesc ...@@ -1358,8 +1348,7 @@ class VarType_TensorDesc
void add_dims(::google::protobuf::int64 value); void add_dims(::google::protobuf::int64 value);
const ::google::protobuf::RepeatedField<::google::protobuf::int64> & const ::google::protobuf::RepeatedField<::google::protobuf::int64> &
dims() const; dims() const;
::google::protobuf::RepeatedField<::google::protobuf::int64> * ::google::protobuf::RepeatedField<::google::protobuf::int64> *mutable_dims();
mutable_dims();
// required .paddle_mobile.framework.proto.VarType.Type // required .paddle_mobile.framework.proto.VarType.Type
// data_type = 1; // data_type = 1;
...@@ -1370,7 +1359,7 @@ class VarType_TensorDesc ...@@ -1370,7 +1359,7 @@ class VarType_TensorDesc
void set_data_type(::paddle_mobile::framework::proto::VarType_Type value); void set_data_type(::paddle_mobile::framework::proto::VarType_Type value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.TensorDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.TensorDesc)
private: private:
void set_has_data_type(); void set_has_data_type();
void clear_has_data_type(); void clear_has_data_type();
...@@ -1389,7 +1378,7 @@ class VarType_LoDTensorDesc ...@@ -1389,7 +1378,7 @@ class VarType_LoDTensorDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorDesc)
*/ */
{ {
public: public:
VarType_LoDTensorDesc(); VarType_LoDTensorDesc();
virtual ~VarType_LoDTensorDesc(); virtual ~VarType_LoDTensorDesc();
...@@ -1439,9 +1428,7 @@ class VarType_LoDTensorDesc ...@@ -1439,9 +1428,7 @@ class VarType_LoDTensorDesc
// implements Message // implements Message
// ---------------------------------------------- // ----------------------------------------------
inline VarType_LoDTensorDesc *New() const PROTOBUF_FINAL { inline VarType_LoDTensorDesc *New() const PROTOBUF_FINAL { return New(NULL); }
return New(NULL);
}
VarType_LoDTensorDesc * VarType_LoDTensorDesc *
New(::google::protobuf::Arena *arena) const PROTOBUF_FINAL; New(::google::protobuf::Arena *arena) const PROTOBUF_FINAL;
...@@ -1460,17 +1447,17 @@ class VarType_LoDTensorDesc ...@@ -1460,17 +1447,17 @@ class VarType_LoDTensorDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_LoDTensorDesc *other); void InternalSwap(VarType_LoDTensorDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1498,7 +1485,7 @@ class VarType_LoDTensorDesc ...@@ -1498,7 +1485,7 @@ class VarType_LoDTensorDesc
void set_lod_level(::google::protobuf::int32 value); void set_lod_level(::google::protobuf::int32 value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorDesc)
private: private:
void set_has_tensor(); void set_has_tensor();
void clear_has_tensor(); void clear_has_tensor();
void set_has_lod_level(); void set_has_lod_level();
...@@ -1519,7 +1506,7 @@ class VarType_LoDTensorArrayDesc ...@@ -1519,7 +1506,7 @@ class VarType_LoDTensorArrayDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc)
*/ */
{ {
public: public:
VarType_LoDTensorArrayDesc(); VarType_LoDTensorArrayDesc();
virtual ~VarType_LoDTensorArrayDesc(); virtual ~VarType_LoDTensorArrayDesc();
...@@ -1556,8 +1543,7 @@ class VarType_LoDTensorArrayDesc ...@@ -1556,8 +1543,7 @@ class VarType_LoDTensorArrayDesc
static const VarType_LoDTensorArrayDesc &default_instance(); static const VarType_LoDTensorArrayDesc &default_instance();
static inline const VarType_LoDTensorArrayDesc * static inline const VarType_LoDTensorArrayDesc *internal_default_instance() {
internal_default_instance() {
return reinterpret_cast<const VarType_LoDTensorArrayDesc *>( return reinterpret_cast<const VarType_LoDTensorArrayDesc *>(
&_VarType_LoDTensorArrayDesc_default_instance_); &_VarType_LoDTensorArrayDesc_default_instance_);
} }
...@@ -1593,17 +1579,17 @@ class VarType_LoDTensorArrayDesc ...@@ -1593,17 +1579,17 @@ class VarType_LoDTensorArrayDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_LoDTensorArrayDesc *other); void InternalSwap(VarType_LoDTensorArrayDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1631,7 +1617,7 @@ class VarType_LoDTensorArrayDesc ...@@ -1631,7 +1617,7 @@ class VarType_LoDTensorArrayDesc
void set_lod_level(::google::protobuf::int32 value); void set_lod_level(::google::protobuf::int32 value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.LoDTensorArrayDesc)
private: private:
void set_has_tensor(); void set_has_tensor();
void clear_has_tensor(); void clear_has_tensor();
void set_has_lod_level(); void set_has_lod_level();
...@@ -1652,7 +1638,7 @@ class VarType_ReaderDesc ...@@ -1652,7 +1638,7 @@ class VarType_ReaderDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ReaderDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ReaderDesc)
*/ */
{ {
public: public:
VarType_ReaderDesc(); VarType_ReaderDesc();
virtual ~VarType_ReaderDesc(); virtual ~VarType_ReaderDesc();
...@@ -1694,9 +1680,7 @@ class VarType_ReaderDesc ...@@ -1694,9 +1680,7 @@ class VarType_ReaderDesc
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 9; static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 9;
void Swap(VarType_ReaderDesc *other); void Swap(VarType_ReaderDesc *other);
friend void swap(VarType_ReaderDesc &a, VarType_ReaderDesc &b) { friend void swap(VarType_ReaderDesc &a, VarType_ReaderDesc &b) { a.Swap(&b); }
a.Swap(&b);
}
// implements Message // implements Message
// ---------------------------------------------- // ----------------------------------------------
...@@ -1720,17 +1704,17 @@ class VarType_ReaderDesc ...@@ -1720,17 +1704,17 @@ class VarType_ReaderDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_ReaderDesc *other); void InternalSwap(VarType_ReaderDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1758,7 +1742,7 @@ class VarType_ReaderDesc ...@@ -1758,7 +1742,7 @@ class VarType_ReaderDesc
lod_tensor() const; lod_tensor() const;
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ReaderDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ReaderDesc)
private: private:
::google::protobuf::internal::InternalMetadataWithArenaLite ::google::protobuf::internal::InternalMetadataWithArenaLite
_internal_metadata_; _internal_metadata_;
::google::protobuf::internal::HasBits<1> _has_bits_; ::google::protobuf::internal::HasBits<1> _has_bits_;
...@@ -1775,7 +1759,7 @@ class VarType_ChannelDesc ...@@ -1775,7 +1759,7 @@ class VarType_ChannelDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ChannelDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.ChannelDesc)
*/ */
{ {
public: public:
VarType_ChannelDesc(); VarType_ChannelDesc();
virtual ~VarType_ChannelDesc(); virtual ~VarType_ChannelDesc();
...@@ -1843,17 +1827,17 @@ class VarType_ChannelDesc ...@@ -1843,17 +1827,17 @@ class VarType_ChannelDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_ChannelDesc *other); void InternalSwap(VarType_ChannelDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1878,7 +1862,7 @@ class VarType_ChannelDesc ...@@ -1878,7 +1862,7 @@ class VarType_ChannelDesc
void set_data_type(::paddle_mobile::framework::proto::VarType_Type value); void set_data_type(::paddle_mobile::framework::proto::VarType_Type value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ChannelDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.ChannelDesc)
private: private:
void set_has_data_type(); void set_has_data_type();
void clear_has_data_type(); void clear_has_data_type();
void set_has_capacity(); void set_has_capacity();
...@@ -1902,7 +1886,7 @@ class VarType_Tuple ...@@ -1902,7 +1886,7 @@ class VarType_Tuple
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.Tuple) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType.Tuple)
*/ */
{ {
public: public:
VarType_Tuple(); VarType_Tuple();
virtual ~VarType_Tuple(); virtual ~VarType_Tuple();
...@@ -1966,17 +1950,17 @@ class VarType_Tuple ...@@ -1966,17 +1950,17 @@ class VarType_Tuple
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType_Tuple *other); void InternalSwap(VarType_Tuple *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -1990,18 +1974,15 @@ class VarType_Tuple ...@@ -1990,18 +1974,15 @@ class VarType_Tuple
int element_type_size() const; int element_type_size() const;
void clear_element_type(); void clear_element_type();
static const int kElementTypeFieldNumber = 1; static const int kElementTypeFieldNumber = 1;
::paddle_mobile::framework::proto::VarType_Type ::paddle_mobile::framework::proto::VarType_Type element_type(int index) const;
element_type(int index) const; void set_element_type(int index,
void
set_element_type(int index,
::paddle_mobile::framework::proto::VarType_Type value); ::paddle_mobile::framework::proto::VarType_Type value);
void void add_element_type(::paddle_mobile::framework::proto::VarType_Type value);
add_element_type(::paddle_mobile::framework::proto::VarType_Type value);
const ::google::protobuf::RepeatedField<int> &element_type() const; const ::google::protobuf::RepeatedField<int> &element_type() const;
::google::protobuf::RepeatedField<int> *mutable_element_type(); ::google::protobuf::RepeatedField<int> *mutable_element_type();
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.Tuple) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType.Tuple)
private: private:
::google::protobuf::internal::InternalMetadataWithArenaLite ::google::protobuf::internal::InternalMetadataWithArenaLite
_internal_metadata_; _internal_metadata_;
::google::protobuf::internal::HasBits<1> _has_bits_; ::google::protobuf::internal::HasBits<1> _has_bits_;
...@@ -2016,7 +1997,7 @@ class VarType ...@@ -2016,7 +1997,7 @@ class VarType
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarType)
*/ */
{ {
public: public:
VarType(); VarType();
virtual ~VarType(); virtual ~VarType();
...@@ -2077,17 +2058,17 @@ class VarType ...@@ -2077,17 +2058,17 @@ class VarType
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarType *other); void InternalSwap(VarType *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -2192,8 +2173,7 @@ class VarType ...@@ -2192,8 +2173,7 @@ class VarType
bool has_channel() const; bool has_channel() const;
void clear_channel(); void clear_channel();
static const int kChannelFieldNumber = 6; static const int kChannelFieldNumber = 6;
const ::paddle_mobile::framework::proto::VarType_ChannelDesc & const ::paddle_mobile::framework::proto::VarType_ChannelDesc &channel() const;
channel() const;
::paddle_mobile::framework::proto::VarType_ChannelDesc *mutable_channel(); ::paddle_mobile::framework::proto::VarType_ChannelDesc *mutable_channel();
::paddle_mobile::framework::proto::VarType_ChannelDesc *release_channel(); ::paddle_mobile::framework::proto::VarType_ChannelDesc *release_channel();
void set_allocated_channel( void set_allocated_channel(
...@@ -2207,8 +2187,8 @@ class VarType ...@@ -2207,8 +2187,8 @@ class VarType
const ::paddle_mobile::framework::proto::VarType_Tuple &tuple() const; const ::paddle_mobile::framework::proto::VarType_Tuple &tuple() const;
::paddle_mobile::framework::proto::VarType_Tuple *mutable_tuple(); ::paddle_mobile::framework::proto::VarType_Tuple *mutable_tuple();
::paddle_mobile::framework::proto::VarType_Tuple *release_tuple(); ::paddle_mobile::framework::proto::VarType_Tuple *release_tuple();
void set_allocated_tuple( void
::paddle_mobile::framework::proto::VarType_Tuple *tuple); set_allocated_tuple(::paddle_mobile::framework::proto::VarType_Tuple *tuple);
// required .paddle_mobile.framework.proto.VarType.Type type = // required .paddle_mobile.framework.proto.VarType.Type type =
// 1; // 1;
...@@ -2219,7 +2199,7 @@ class VarType ...@@ -2219,7 +2199,7 @@ class VarType
void set_type(::paddle_mobile::framework::proto::VarType_Type value); void set_type(::paddle_mobile::framework::proto::VarType_Type value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarType)
private: private:
void set_has_type(); void set_has_type();
void clear_has_type(); void clear_has_type();
void set_has_selected_rows(); void set_has_selected_rows();
...@@ -2241,8 +2221,7 @@ class VarType ...@@ -2241,8 +2221,7 @@ class VarType
mutable int _cached_size_; mutable int _cached_size_;
::paddle_mobile::framework::proto::VarType_TensorDesc *selected_rows_; ::paddle_mobile::framework::proto::VarType_TensorDesc *selected_rows_;
::paddle_mobile::framework::proto::VarType_LoDTensorDesc *lod_tensor_; ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *lod_tensor_;
::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc ::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc *tensor_array_;
*tensor_array_;
::paddle_mobile::framework::proto::VarType_ReaderDesc *reader_; ::paddle_mobile::framework::proto::VarType_ReaderDesc *reader_;
::paddle_mobile::framework::proto::VarType_ChannelDesc *channel_; ::paddle_mobile::framework::proto::VarType_ChannelDesc *channel_;
::paddle_mobile::framework::proto::VarType_Tuple *tuple_; ::paddle_mobile::framework::proto::VarType_Tuple *tuple_;
...@@ -2256,7 +2235,7 @@ class VarDesc ...@@ -2256,7 +2235,7 @@ class VarDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.VarDesc)
*/ */
{ {
public: public:
VarDesc(); VarDesc();
virtual ~VarDesc(); virtual ~VarDesc();
...@@ -2317,17 +2296,17 @@ class VarDesc ...@@ -2317,17 +2296,17 @@ class VarDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(VarDesc *other); void InternalSwap(VarDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -2368,7 +2347,7 @@ class VarDesc ...@@ -2368,7 +2347,7 @@ class VarDesc
void set_persistable(bool value); void set_persistable(bool value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.VarDesc)
private: private:
void set_has_name(); void set_has_name();
void clear_has_name(); void clear_has_name();
void set_has_type(); void set_has_type();
...@@ -2395,7 +2374,7 @@ class BlockDesc ...@@ -2395,7 +2374,7 @@ class BlockDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.BlockDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.BlockDesc)
*/ */
{ {
public: public:
BlockDesc(); BlockDesc();
virtual ~BlockDesc(); virtual ~BlockDesc();
...@@ -2430,8 +2409,7 @@ class BlockDesc ...@@ -2430,8 +2409,7 @@ class BlockDesc
static const BlockDesc &default_instance(); static const BlockDesc &default_instance();
static inline const BlockDesc *internal_default_instance() { static inline const BlockDesc *internal_default_instance() {
return reinterpret_cast<const BlockDesc *>( return reinterpret_cast<const BlockDesc *>(&_BlockDesc_default_instance_);
&_BlockDesc_default_instance_);
} }
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 14; static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = 14;
...@@ -2459,17 +2437,17 @@ class BlockDesc ...@@ -2459,17 +2437,17 @@ class BlockDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(BlockDesc *other); void InternalSwap(BlockDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -2528,7 +2506,7 @@ class BlockDesc ...@@ -2528,7 +2506,7 @@ class BlockDesc
void set_forward_block_idx(::google::protobuf::int32 value); void set_forward_block_idx(::google::protobuf::int32 value);
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.BlockDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.BlockDesc)
private: private:
void set_has_idx(); void set_has_idx();
void clear_has_idx(); void clear_has_idx();
void set_has_parent_idx(); void set_has_parent_idx();
...@@ -2561,7 +2539,7 @@ class ProgramDesc ...@@ -2561,7 +2539,7 @@ class ProgramDesc
MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.ProgramDesc) MessageLite /* @@protoc_insertion_point(class_definition:paddle_mobile.framework.proto.ProgramDesc)
*/ */
{ {
public: public:
ProgramDesc(); ProgramDesc();
virtual ~ProgramDesc(); virtual ~ProgramDesc();
...@@ -2625,17 +2603,17 @@ class ProgramDesc ...@@ -2625,17 +2603,17 @@ class ProgramDesc
void DiscardUnknownFields(); void DiscardUnknownFields();
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
private: private:
void SharedCtor(); void SharedCtor();
void SharedDtor(); void SharedDtor();
void SetCachedSize(int size) const; void SetCachedSize(int size) const;
void InternalSwap(ProgramDesc *other); void InternalSwap(ProgramDesc *other);
private: private:
inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; } inline ::google::protobuf::Arena *GetArenaNoVirtual() const { return NULL; }
inline void *MaybeArenaPtr() const { return NULL; } inline void *MaybeArenaPtr() const { return NULL; }
public: public:
::std::string GetTypeName() const PROTOBUF_FINAL; ::std::string GetTypeName() const PROTOBUF_FINAL;
// nested types // nested types
...@@ -2659,7 +2637,7 @@ class ProgramDesc ...@@ -2659,7 +2637,7 @@ class ProgramDesc
blocks() const; blocks() const;
// @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.ProgramDesc) // @@protoc_insertion_point(class_scope:paddle_mobile.framework.proto.ProgramDesc)
private: private:
::google::protobuf::internal::InternalMetadataWithArenaLite ::google::protobuf::internal::InternalMetadataWithArenaLite
_internal_metadata_; _internal_metadata_;
::google::protobuf::internal::HasBits<1> _has_bits_; ::google::protobuf::internal::HasBits<1> _has_bits_;
...@@ -2697,15 +2675,14 @@ inline const ::std::string &OpDesc_Attr::name() const { ...@@ -2697,15 +2675,14 @@ inline const ::std::string &OpDesc_Attr::name() const {
} }
inline void OpDesc_Attr::set_name(const ::std::string &value) { inline void OpDesc_Attr::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void OpDesc_Attr::set_name(::std::string &&value) { inline void OpDesc_Attr::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
...@@ -2713,15 +2690,13 @@ inline void OpDesc_Attr::set_name(::std::string &&value) { ...@@ -2713,15 +2690,13 @@ inline void OpDesc_Attr::set_name(::std::string &&value) {
inline void OpDesc_Attr::set_name(const char *value) { inline void OpDesc_Attr::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
inline void OpDesc_Attr::set_name(const char *value, size_t size) { inline void OpDesc_Attr::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.name)
} }
...@@ -2954,8 +2929,7 @@ inline void OpDesc_Attr::set_strings(int index, const char *value) { ...@@ -2954,8 +2929,7 @@ inline void OpDesc_Attr::set_strings(int index, const char *value) {
} }
inline void OpDesc_Attr::set_strings(int index, const char *value, inline void OpDesc_Attr::set_strings(int index, const char *value,
size_t size) { size_t size) {
strings_.Mutable(index)->assign(reinterpret_cast<const char *>(value), strings_.Mutable(index)->assign(reinterpret_cast<const char *>(value), size);
size);
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.Attr.strings)
} }
inline ::std::string *OpDesc_Attr::add_strings() { inline ::std::string *OpDesc_Attr::add_strings() {
...@@ -3147,8 +3121,7 @@ inline void OpDesc_Var::set_allocated_parameter(::std::string *parameter) { ...@@ -3147,8 +3121,7 @@ inline void OpDesc_Var::set_allocated_parameter(::std::string *parameter) {
clear_has_parameter(); clear_has_parameter();
} }
parameter_.SetAllocatedNoArena( parameter_.SetAllocatedNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), &::google::protobuf::internal::GetEmptyStringAlreadyInited(), parameter);
parameter);
// @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter) // @@protoc_insertion_point(field_set_allocated:paddle_mobile.framework.proto.OpDesc.Var.parameter)
} }
...@@ -3239,15 +3212,14 @@ inline const ::std::string &OpDesc::type() const { ...@@ -3239,15 +3212,14 @@ inline const ::std::string &OpDesc::type() const {
} }
inline void OpDesc::set_type(const ::std::string &value) { inline void OpDesc::set_type(const ::std::string &value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpDesc.type)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void OpDesc::set_type(::std::string &&value) { inline void OpDesc::set_type(::std::string &&value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpDesc.type)
} }
...@@ -3255,15 +3227,13 @@ inline void OpDesc::set_type(::std::string &&value) { ...@@ -3255,15 +3227,13 @@ inline void OpDesc::set_type(::std::string &&value) {
inline void OpDesc::set_type(const char *value) { inline void OpDesc::set_type(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpDesc.type)
} }
inline void OpDesc::set_type(const char *value, size_t size) { inline void OpDesc::set_type(const char *value, size_t size) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpDesc.type)
} }
...@@ -3421,15 +3391,14 @@ inline const ::std::string &OpProto_Var::name() const { ...@@ -3421,15 +3391,14 @@ inline const ::std::string &OpProto_Var::name() const {
} }
inline void OpProto_Var::set_name(const ::std::string &value) { inline void OpProto_Var::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Var.name)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void OpProto_Var::set_name(::std::string &&value) { inline void OpProto_Var::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Var.name)
} }
...@@ -3437,15 +3406,13 @@ inline void OpProto_Var::set_name(::std::string &&value) { ...@@ -3437,15 +3406,13 @@ inline void OpProto_Var::set_name(::std::string &&value) {
inline void OpProto_Var::set_name(const char *value) { inline void OpProto_Var::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Var.name)
} }
inline void OpProto_Var::set_name(const char *value, size_t size) { inline void OpProto_Var::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Var.name)
} }
...@@ -3629,15 +3596,14 @@ inline const ::std::string &OpProto_Attr::name() const { ...@@ -3629,15 +3596,14 @@ inline const ::std::string &OpProto_Attr::name() const {
} }
inline void OpProto_Attr::set_name(const ::std::string &value) { inline void OpProto_Attr::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void OpProto_Attr::set_name(::std::string &&value) { inline void OpProto_Attr::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
...@@ -3645,15 +3611,13 @@ inline void OpProto_Attr::set_name(::std::string &&value) { ...@@ -3645,15 +3611,13 @@ inline void OpProto_Attr::set_name(::std::string &&value) {
inline void OpProto_Attr::set_name(const char *value) { inline void OpProto_Attr::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
inline void OpProto_Attr::set_name(const char *value, size_t size) { inline void OpProto_Attr::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.Attr.name)
} }
...@@ -3813,15 +3777,14 @@ inline const ::std::string &OpProto::type() const { ...@@ -3813,15 +3777,14 @@ inline const ::std::string &OpProto::type() const {
} }
inline void OpProto::set_type(const ::std::string &value) { inline void OpProto::set_type(const ::std::string &value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.OpProto.type)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void OpProto::set_type(::std::string &&value) { inline void OpProto::set_type(::std::string &&value) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.OpProto.type)
} }
...@@ -3829,15 +3792,13 @@ inline void OpProto::set_type(::std::string &&value) { ...@@ -3829,15 +3792,13 @@ inline void OpProto::set_type(::std::string &&value) {
inline void OpProto::set_type(const char *value) { inline void OpProto::set_type(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.OpProto.type)
} }
inline void OpProto::set_type(const char *value, size_t size) { inline void OpProto::set_type(const char *value, size_t size) {
set_has_type(); set_has_type();
type_.SetNoArena( type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.OpProto.type)
} }
...@@ -4443,8 +4404,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc * ...@@ -4443,8 +4404,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc *
VarType::mutable_selected_rows() { VarType::mutable_selected_rows() {
set_has_selected_rows(); set_has_selected_rows();
if (selected_rows_ == NULL) { if (selected_rows_ == NULL) {
selected_rows_ = selected_rows_ = new ::paddle_mobile::framework::proto::VarType_TensorDesc;
new ::paddle_mobile::framework::proto::VarType_TensorDesc;
} }
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows) // @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.selected_rows)
return selected_rows_; return selected_rows_;
...@@ -4453,8 +4413,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc * ...@@ -4453,8 +4413,7 @@ inline ::paddle_mobile::framework::proto::VarType_TensorDesc *
VarType::release_selected_rows() { VarType::release_selected_rows() {
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows) // @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.selected_rows)
clear_has_selected_rows(); clear_has_selected_rows();
::paddle_mobile::framework::proto::VarType_TensorDesc *temp = ::paddle_mobile::framework::proto::VarType_TensorDesc *temp = selected_rows_;
selected_rows_;
selected_rows_ = NULL; selected_rows_ = NULL;
return temp; return temp;
} }
...@@ -4489,8 +4448,8 @@ VarType::lod_tensor() const { ...@@ -4489,8 +4448,8 @@ VarType::lod_tensor() const {
lod_tensor_; lod_tensor_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.lod_tensor)
return p != NULL ? *p return p != NULL ? *p
: *reinterpret_cast<const ::paddle_mobile::framework:: : *reinterpret_cast<const ::paddle_mobile::framework::proto::
proto::VarType_LoDTensorDesc *>( VarType_LoDTensorDesc *>(
&::paddle_mobile::framework::proto:: &::paddle_mobile::framework::proto::
_VarType_LoDTensorDesc_default_instance_); _VarType_LoDTensorDesc_default_instance_);
} }
...@@ -4498,8 +4457,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc * ...@@ -4498,8 +4457,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *
VarType::mutable_lod_tensor() { VarType::mutable_lod_tensor() {
set_has_lod_tensor(); set_has_lod_tensor();
if (lod_tensor_ == NULL) { if (lod_tensor_ == NULL) {
lod_tensor_ = lod_tensor_ = new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc;
new ::paddle_mobile::framework::proto::VarType_LoDTensorDesc;
} }
// @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_mutable:paddle_mobile.framework.proto.VarType.lod_tensor)
return lod_tensor_; return lod_tensor_;
...@@ -4508,8 +4466,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc * ...@@ -4508,8 +4466,7 @@ inline ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *
VarType::release_lod_tensor() { VarType::release_lod_tensor() {
// @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor) // @@protoc_insertion_point(field_release:paddle_mobile.framework.proto.VarType.lod_tensor)
clear_has_lod_tensor(); clear_has_lod_tensor();
::paddle_mobile::framework::proto::VarType_LoDTensorDesc *temp = ::paddle_mobile::framework::proto::VarType_LoDTensorDesc *temp = lod_tensor_;
lod_tensor_;
lod_tensor_ = NULL; lod_tensor_ = NULL;
return temp; return temp;
} }
...@@ -4544,8 +4501,7 @@ VarType::tensor_array() const { ...@@ -4544,8 +4501,7 @@ VarType::tensor_array() const {
const ::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc *p = const ::paddle_mobile::framework::proto::VarType_LoDTensorArrayDesc *p =
tensor_array_; tensor_array_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.tensor_array)
return p != NULL return p != NULL ? *p
? *p
: *reinterpret_cast<const ::paddle_mobile::framework::proto:: : *reinterpret_cast<const ::paddle_mobile::framework::proto::
VarType_LoDTensorArrayDesc *>( VarType_LoDTensorArrayDesc *>(
&::paddle_mobile::framework::proto:: &::paddle_mobile::framework::proto::
...@@ -4644,18 +4600,18 @@ inline void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; } ...@@ -4644,18 +4600,18 @@ inline void VarType::set_has_channel() { _has_bits_[0] |= 0x00000010u; }
inline void VarType::clear_has_channel() { _has_bits_[0] &= ~0x00000010u; } inline void VarType::clear_has_channel() { _has_bits_[0] &= ~0x00000010u; }
inline void VarType::clear_channel() { inline void VarType::clear_channel() {
if (channel_ != NULL) if (channel_ != NULL)
channel_ channel_->::paddle_mobile::framework::proto::VarType_ChannelDesc::Clear();
->::paddle_mobile::framework::proto::VarType_ChannelDesc::Clear();
clear_has_channel(); clear_has_channel();
} }
inline const ::paddle_mobile::framework::proto::VarType_ChannelDesc & inline const ::paddle_mobile::framework::proto::VarType_ChannelDesc &
VarType::channel() const { VarType::channel() const {
const ::paddle_mobile::framework::proto::VarType_ChannelDesc *p = channel_; const ::paddle_mobile::framework::proto::VarType_ChannelDesc *p = channel_;
// @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel) // @@protoc_insertion_point(field_get:paddle_mobile.framework.proto.VarType.channel)
return p != NULL ? *p return p != NULL
: *reinterpret_cast<const ::paddle_mobile::framework:: ? *p
proto::VarType_ChannelDesc *>( : *reinterpret_cast<
&::paddle_mobile::framework::proto:: const ::paddle_mobile::framework::proto::VarType_ChannelDesc
*>(&::paddle_mobile::framework::proto::
_VarType_ChannelDesc_default_instance_); _VarType_ChannelDesc_default_instance_);
} }
inline ::paddle_mobile::framework::proto::VarType_ChannelDesc * inline ::paddle_mobile::framework::proto::VarType_ChannelDesc *
...@@ -4759,15 +4715,14 @@ inline const ::std::string &VarDesc::name() const { ...@@ -4759,15 +4715,14 @@ inline const ::std::string &VarDesc::name() const {
} }
inline void VarDesc::set_name(const ::std::string &value) { inline void VarDesc::set_name(const ::std::string &value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); value);
// @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set:paddle_mobile.framework.proto.VarDesc.name)
} }
#if LANG_CXX11 #if LANG_CXX11
inline void VarDesc::set_name(::std::string &&value) { inline void VarDesc::set_name(::std::string &&value) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::move(value)); ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_rvalue:paddle_mobile.framework.proto.VarDesc.name)
} }
...@@ -4775,15 +4730,13 @@ inline void VarDesc::set_name(::std::string &&value) { ...@@ -4775,15 +4730,13 @@ inline void VarDesc::set_name(::std::string &&value) {
inline void VarDesc::set_name(const char *value) { inline void VarDesc::set_name(const char *value) {
GOOGLE_DCHECK(value != NULL); GOOGLE_DCHECK(value != NULL);
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(value)); ::std::string(value));
// @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_char:paddle_mobile.framework.proto.VarDesc.name)
} }
inline void VarDesc::set_name(const char *value, size_t size) { inline void VarDesc::set_name(const char *value, size_t size) {
set_has_name(); set_has_name();
name_.SetNoArena( name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char *>(value), size)); ::std::string(reinterpret_cast<const char *>(value), size));
// @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name) // @@protoc_insertion_point(field_set_pointer:paddle_mobile.framework.proto.VarDesc.name)
} }
......
...@@ -152,8 +152,7 @@ bool CheckLoD(const LoD &in, int tensor_height) { ...@@ -152,8 +152,7 @@ bool CheckLoD(const LoD &in, int tensor_height) {
// check: all the offsets in a level should be ascending(no same // check: all the offsets in a level should be ascending(no same
// items // items
// allows). // allows).
if (!std::is_sorted(level.begin(), level.begin(), if (!std::is_sorted(level.begin(), level.begin(), [](size_t a, size_t b) {
[](size_t a, size_t b) {
if (a < b) if (a < b)
return true; return true;
return false; return false;
...@@ -188,8 +187,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height) { ...@@ -188,8 +187,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height) {
// check: all the offsets in a level should be ascending(no same // check: all the offsets in a level should be ascending(no same
// items // items
// allows). // allows).
if (!std::is_sorted(level.begin(), level.begin(), if (!std::is_sorted(level.begin(), level.begin(), [](size_t a, size_t b) {
[](size_t a, size_t b) {
if (a < b) if (a < b)
return true; return true;
return false; return false;
......
...@@ -102,7 +102,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height = -1); ...@@ -102,7 +102,7 @@ bool CheckAbsLoD(const LoD &in, int tensor_height = -1);
* see https://en.wikipedia.org/wiki/Level_of_details for reference. * see https://en.wikipedia.org/wiki/Level_of_details for reference.
*/ */
class LoDTensor : public Tensor { class LoDTensor : public Tensor {
public: public:
LoDTensor() : Tensor() {} LoDTensor() : Tensor() {}
explicit LoDTensor(const LoD &lod) : lod_(lod) {} explicit LoDTensor(const LoD &lod) : lod_(lod) {}
...@@ -139,7 +139,7 @@ class LoDTensor : public Tensor { ...@@ -139,7 +139,7 @@ class LoDTensor : public Tensor {
return (lod_)[level].size() - 1; return (lod_)[level].size() - 1;
} }
private: private:
LoD lod_; LoD lod_;
}; };
......
...@@ -26,7 +26,7 @@ namespace paddle_mobile { ...@@ -26,7 +26,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class OpDesc : PaddleMobileObject { class OpDesc : PaddleMobileObject {
public: public:
OpDesc(const proto::OpDesc &desc); OpDesc(const proto::OpDesc &desc);
const std::vector<std::string> &Input(const std::string &name) const; const std::vector<std::string> &Input(const std::string &name) const;
const std::vector<std::string> &Output(const std::string &name) const; const std::vector<std::string> &Output(const std::string &name) const;
...@@ -40,7 +40,7 @@ class OpDesc : PaddleMobileObject { ...@@ -40,7 +40,7 @@ class OpDesc : PaddleMobileObject {
const std::string &Type() { return desc_.type(); }; const std::string &Type() { return desc_.type(); };
private: private:
proto::OpDesc desc_; proto::OpDesc desc_;
VariableNameMap inputs_; VariableNameMap inputs_;
VariableNameMap outputs_; VariableNameMap outputs_;
......
...@@ -39,7 +39,7 @@ template <typename Dtype> class OpInfoMap; ...@@ -39,7 +39,7 @@ template <typename Dtype> class OpInfoMap;
template <typename Dtype> static OpInfoMap<Dtype> *g_op_info_map = nullptr; template <typename Dtype> static OpInfoMap<Dtype> *g_op_info_map = nullptr;
template <typename Dtype> class OpInfoMap { template <typename Dtype> class OpInfoMap {
public: public:
static OpInfoMap &Instance() { static OpInfoMap &Instance() {
if (g_op_info_map<Dtype> == nullptr) { if (g_op_info_map<Dtype> == nullptr) {
g_op_info_map<Dtype> = new OpInfoMap(); g_op_info_map<Dtype> = new OpInfoMap();
...@@ -83,7 +83,7 @@ template <typename Dtype> class OpInfoMap { ...@@ -83,7 +83,7 @@ template <typename Dtype> class OpInfoMap {
return &map_; return &map_;
} }
private: private:
OpInfoMap() = default; OpInfoMap() = default;
std::unordered_map<std::string, OpInfo<Dtype>> map_; std::unordered_map<std::string, OpInfo<Dtype>> map_;
......
...@@ -27,8 +27,7 @@ struct OpKernelType { ...@@ -27,8 +27,7 @@ struct OpKernelType {
struct Hash { struct Hash {
size_t operator()(const OpKernelType &key) const { size_t operator()(const OpKernelType &key) const {
int data_type = static_cast<int>(key.data_type_) << LEFT_SHIFT; int data_type = static_cast<int>(key.data_type_) << LEFT_SHIFT;
int data_layout = static_cast<int>(key.data_layout_) int data_layout = static_cast<int>(key.data_layout_) << (LEFT_SHIFT * 2);
<< (LEFT_SHIFT * 2);
std::hash<int> hasher; std::hash<int> hasher;
return hasher(data_type + data_layout); return hasher(data_type + data_layout);
......
...@@ -49,7 +49,7 @@ static std::unordered_map< ...@@ -49,7 +49,7 @@ static std::unordered_map<
{"fetch", {{"X"}, {"Out"}}}}; {"fetch", {{"X"}, {"Out"}}}};
template <typename Dtype> class OperatorBase : PaddleMobileObject { template <typename Dtype> class OperatorBase : PaddleMobileObject {
public: public:
OperatorBase(const std::string &type, const VariableNameMap &inputs, OperatorBase(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const AttributeMap &attrs, const VariableNameMap &outputs, const AttributeMap &attrs,
std::shared_ptr<Scope> scope); std::shared_ptr<Scope> scope);
...@@ -66,30 +66,30 @@ template <typename Dtype> class OperatorBase : PaddleMobileObject { ...@@ -66,30 +66,30 @@ template <typename Dtype> class OperatorBase : PaddleMobileObject {
} }
} }
protected: protected:
std::shared_ptr<Scope> scope_; std::shared_ptr<Scope> scope_;
std::string type_; std::string type_;
VariableNameMap inputs_; VariableNameMap inputs_;
VariableNameMap outputs_; VariableNameMap outputs_;
AttributeMap attrs_; AttributeMap attrs_;
private: private:
void CheckAllInputOutputSet() const; void CheckAllInputOutputSet() const;
}; };
template <typename Dtype> template <typename Dtype>
class OperatorWithKernel : public OperatorBase<Dtype> { class OperatorWithKernel : public OperatorBase<Dtype> {
public: public:
OperatorWithKernel(const std::string &type, const VariableNameMap &inputs, OperatorWithKernel(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const VariableNameMap &outputs, const AttributeMap &attrs,
const AttributeMap &attrs, std::shared_ptr<Scope> scope) std::shared_ptr<Scope> scope)
: OperatorBase<Dtype>(type, inputs, outputs, attrs, scope) {} : OperatorBase<Dtype>(type, inputs, outputs, attrs, scope) {}
virtual void InferShape() const = 0; virtual void InferShape() const = 0;
virtual void Run() const = 0; virtual void Run() const = 0;
}; };
template <typename Dtype, typename P> class OpKernelBase : PaddleMobileObject { template <typename Dtype, typename P> class OpKernelBase : PaddleMobileObject {
public: public:
virtual void Compute(const P &para) const = 0; virtual void Compute(const P &para) const = 0;
virtual ~OpKernelBase() = default; virtual ~OpKernelBase() = default;
......
...@@ -24,13 +24,13 @@ SOFTWARE. ...@@ -24,13 +24,13 @@ SOFTWARE.
namespace paddle_mobile { namespace paddle_mobile {
class PaddleMobileObject { class PaddleMobileObject {
public: public:
virtual std::string ToString() { virtual std::string ToString() {
char address[128] = {0}; char address[128] = {0};
sprintf(address, "%p", this); sprintf(address, "%p", this);
return std::string(address); return std::string(address);
} }
private: private:
}; };
} // namespace paddle_mobile } // namespace paddle_mobile
...@@ -29,7 +29,7 @@ namespace paddle_mobile { ...@@ -29,7 +29,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class Node : PaddleMobileObject { class Node : PaddleMobileObject {
public: public:
Node(const std::string &type) : type_(type) {} Node(const std::string &type) : type_(type) {}
Node(std::shared_ptr<OpDesc> op_desc) Node(std::shared_ptr<OpDesc> op_desc)
: op_desc_(op_desc), type_(op_desc->Type()){}; : op_desc_(op_desc), type_(op_desc->Type()){};
...@@ -39,7 +39,7 @@ class Node : PaddleMobileObject { ...@@ -39,7 +39,7 @@ class Node : PaddleMobileObject {
Node &To(int index); Node &To(int index);
uint depth(uint begin = 0); uint depth(uint begin = 0);
private: private:
std::shared_ptr<OpDesc> op_desc_; std::shared_ptr<OpDesc> op_desc_;
std::string ToString(std::string blank, const Node *node) const; std::string ToString(std::string blank, const Node *node) const;
std::vector<std::shared_ptr<Node>> outputs_; std::vector<std::shared_ptr<Node>> outputs_;
......
...@@ -35,8 +35,7 @@ ProgramOptimize::FushionOptimize(std::shared_ptr<ProgramDesc> ori_des) { ...@@ -35,8 +35,7 @@ ProgramOptimize::FushionOptimize(std::shared_ptr<ProgramDesc> ori_des) {
auto op = block->Ops()[j]; auto op = block->Ops()[j];
auto op_type = op->Type(); auto op_type = op->Type();
// DLOG << "op type: " << op_type << " index: " << j; // DLOG << "op type: " << op_type << " index: " << j;
if (op_input_output_key.find(op->Type()) == if (op_input_output_key.find(op->Type()) == op_input_output_key.end()) {
op_input_output_key.end()) {
return NULL; return NULL;
} }
......
...@@ -26,13 +26,13 @@ namespace paddle_mobile { ...@@ -26,13 +26,13 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class ProgramOptimize { class ProgramOptimize {
public: public:
ProgramOptimize() {} ProgramOptimize() {}
std::shared_ptr<ProgramDesc> Optimize(); std::shared_ptr<ProgramDesc> Optimize();
std::shared_ptr<ProgramDesc> std::shared_ptr<ProgramDesc>
FushionOptimize(std::shared_ptr<ProgramDesc> ori_des); FushionOptimize(std::shared_ptr<ProgramDesc> ori_des);
private: private:
// std::shared_ptr<ProgramDesc> ori_desc_; // std::shared_ptr<ProgramDesc> ori_desc_;
std::vector<std::unordered_map<std::string, std::shared_ptr<Node>>> std::vector<std::unordered_map<std::string, std::shared_ptr<Node>>>
outputs_nodes_; outputs_nodes_;
......
...@@ -28,12 +28,12 @@ namespace framework { ...@@ -28,12 +28,12 @@ namespace framework {
template <typename Dtype, Precision P = Precision::FP32> template <typename Dtype, Precision P = Precision::FP32>
class Program : PaddleMobileObject { class Program : PaddleMobileObject {
public: public:
std::shared_ptr<ProgramDesc> originProgram; std::shared_ptr<ProgramDesc> originProgram;
std::shared_ptr<ProgramDesc> optimizeProgram; std::shared_ptr<ProgramDesc> optimizeProgram;
std::shared_ptr<Scope> scope; std::shared_ptr<Scope> scope;
private: private:
}; };
} // namespace framework } // namespace framework
......
...@@ -28,12 +28,12 @@ namespace paddle_mobile { ...@@ -28,12 +28,12 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class ProgramDesc : PaddleMobileObject { class ProgramDesc : PaddleMobileObject {
public: public:
ProgramDesc(const proto::ProgramDesc &desc); ProgramDesc(const proto::ProgramDesc &desc);
std::shared_ptr<BlockDesc> Block(size_t idx); std::shared_ptr<BlockDesc> Block(size_t idx);
const std::vector<std::shared_ptr<BlockDesc>> &Blocks() { return blocks_; }; const std::vector<std::shared_ptr<BlockDesc>> &Blocks() { return blocks_; };
private: private:
std::vector<std::shared_ptr<BlockDesc>> blocks_; std::vector<std::shared_ptr<BlockDesc>> blocks_;
proto::ProgramDesc desc_; proto::ProgramDesc desc_;
}; };
......
...@@ -26,7 +26,7 @@ SOFTWARE. ...@@ -26,7 +26,7 @@ SOFTWARE.
namespace paddle_mobile { namespace paddle_mobile {
namespace framework { namespace framework {
class Scope { class Scope {
public: public:
Scope() {} Scope() {}
~Scope() {} ~Scope() {}
...@@ -67,7 +67,7 @@ class Scope { ...@@ -67,7 +67,7 @@ class Scope {
Variable *FindVarLocally(const std::string &name) const; Variable *FindVarLocally(const std::string &name) const;
private: private:
// Call Scope::NewScope for a sub-scope. // Call Scope::NewScope for a sub-scope.
explicit Scope(Scope const *parent) : parent_(parent) {} explicit Scope(Scope const *parent) : parent_(parent) {}
......
...@@ -27,7 +27,7 @@ namespace paddle_mobile { ...@@ -27,7 +27,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class SelectedRows { class SelectedRows {
public: public:
SelectedRows(const std::vector<int64_t> &rows, const int64_t &height) SelectedRows(const std::vector<int64_t> &rows, const int64_t &height)
: rows_(rows), height_(height) { : rows_(rows), height_(height) {
value_.reset(new Tensor()); value_.reset(new Tensor());
...@@ -67,7 +67,7 @@ class SelectedRows { ...@@ -67,7 +67,7 @@ class SelectedRows {
return make_ddim(dims); return make_ddim(dims);
} }
private: private:
// Notice: rows can be duplicate. We can have {0, 4, 7, 0, 5, 7, 9} // Notice: rows can be duplicate. We can have {0, 4, 7, 0, 5, 7, 9}
// here. // here.
// SelectedRows are simply concated when adding together. Until a // SelectedRows are simply concated when adding together. Until a
......
...@@ -56,8 +56,7 @@ struct SizeOfTypeFunctor<HEAD, TAIL...> { ...@@ -56,8 +56,7 @@ struct SizeOfTypeFunctor<HEAD, TAIL...> {
}; };
static inline size_t SizeOfType(std::type_index type) { static inline size_t SizeOfType(std::type_index type) {
SizeOfTypeFunctor<int, float, double, int16_t, int64_t, bool, size_t> SizeOfTypeFunctor<int, float, double, int16_t, int64_t, bool, size_t> functor;
functor;
size_t size = functor(type); size_t size = functor(type);
// PADDLE_ENFORCE(size != 0UL, "Cannot get size of type %s", // PADDLE_ENFORCE(size != 0UL, "Cannot get size of type %s",
// type.name()); // type.name());
...@@ -67,7 +66,7 @@ static inline size_t SizeOfType(std::type_index type) { ...@@ -67,7 +66,7 @@ static inline size_t SizeOfType(std::type_index type) {
class LoDTensor; class LoDTensor;
class Tensor { class Tensor {
public: public:
Tensor() : offset_(0) {} Tensor() : offset_(0) {}
/*! Return a pointer to mutable memory block. */ /*! Return a pointer to mutable memory block. */
...@@ -78,8 +77,8 @@ class Tensor { ...@@ -78,8 +77,8 @@ class Tensor {
// typeid(T).hash_code(), // typeid(T).hash_code(),
// "Tensor holds the wrong type, it holds %s", // "Tensor holds the wrong type, it holds %s",
// this->holder_->type().name()); // this->holder_->type().name());
return reinterpret_cast<T *>( return reinterpret_cast<T *>(reinterpret_cast<uintptr_t>(holder_->ptr()) +
reinterpret_cast<uintptr_t>(holder_->ptr()) + offset_); offset_);
} }
/*! Return a pointer to constant memory block. */ /*! Return a pointer to constant memory block. */
...@@ -236,7 +235,7 @@ class Tensor { ...@@ -236,7 +235,7 @@ class Tensor {
inline void set_layout(const DataLayout layout) { layout_ = layout; } inline void set_layout(const DataLayout layout) { layout_ = layout; }
private: private:
/** /**
* @note Placeholder hides type T, so it doesn't appear as a * @note Placeholder hides type T, so it doesn't appear as a
* template * template
......
...@@ -189,8 +189,7 @@ void TensorFromStream(std::istream &is, framework::Tensor *tensor) { ...@@ -189,8 +189,7 @@ void TensorFromStream(std::istream &is, framework::Tensor *tensor) {
{ // read tensor { // read tensor
std::vector<int64_t> dims; std::vector<int64_t> dims;
dims.reserve(static_cast<size_t>(desc.dims().size())); dims.reserve(static_cast<size_t>(desc.dims().size()));
std::copy(desc.dims().begin(), desc.dims().end(), std::copy(desc.dims().begin(), desc.dims().end(), std::back_inserter(dims));
std::back_inserter(dims));
tensor->Resize(framework::make_ddim(dims)); tensor->Resize(framework::make_ddim(dims));
void *buf; void *buf;
......
...@@ -25,7 +25,7 @@ namespace paddle_mobile { ...@@ -25,7 +25,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
class VarDesc { class VarDesc {
public: public:
VarDesc(const proto::VarDesc &desc); VarDesc(const proto::VarDesc &desc);
std::string Name() const { return desc_.name(); } std::string Name() const { return desc_.name(); }
...@@ -80,7 +80,7 @@ class VarDesc { ...@@ -80,7 +80,7 @@ class VarDesc {
return this->RepeatedToVector(tensor_desc().dims()); return this->RepeatedToVector(tensor_desc().dims());
} }
private: private:
proto::VarDesc desc_; proto::VarDesc desc_;
}; };
......
...@@ -28,7 +28,7 @@ SOFTWARE. ...@@ -28,7 +28,7 @@ SOFTWARE.
namespace paddle_mobile { namespace paddle_mobile {
namespace framework { namespace framework {
class Variable : public PaddleMobileObject { class Variable : public PaddleMobileObject {
public: public:
template <typename T> const T *Get() const { template <typename T> const T *Get() const {
return static_cast<const T *>(holder_->Ptr()); return static_cast<const T *>(holder_->Ptr());
} }
...@@ -67,7 +67,7 @@ class Variable : public PaddleMobileObject { ...@@ -67,7 +67,7 @@ class Variable : public PaddleMobileObject {
void SetName(const std::string *name) { name_ = name; } void SetName(const std::string *name) { name_ = name; }
private: private:
struct Placeholder { struct Placeholder {
Placeholder() = default; Placeholder() = default;
virtual ~Placeholder() = default; virtual ~Placeholder() = default;
......
...@@ -174,10 +174,8 @@ Loader<Dtype, P>::Load(const std::string &dirname) { ...@@ -174,10 +174,8 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
auto var = scope->Var(var_desc->Name()); auto var = scope->Var(var_desc->Name());
if (var_desc->GetType() == framework::proto::VarType::LOD_TENSOR) { if (var_desc->GetType() == framework::proto::VarType::LOD_TENSOR) {
if (var_desc->Persistable() && if (var_desc->Persistable() &&
var_desc->GetType() != var_desc->GetType() != framework::proto::VarType::FEED_MINIBATCH &&
framework::proto::VarType::FEED_MINIBATCH && var_desc->GetType() != framework::proto::VarType::FETCH_LIST) {
var_desc->GetType() !=
framework::proto::VarType::FETCH_LIST) {
framework::LoDTensor *tensor = framework::LoDTensor *tensor =
var->GetMutable<framework::LoDTensor>(); var->GetMutable<framework::LoDTensor>();
// to load // to load
...@@ -268,8 +266,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) { ...@@ -268,8 +266,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
} }
if (var.persistable() && if (var.persistable() &&
var.type().type() != var.type().type() != framework::proto::VarType::FEED_MINIBATCH &&
framework::proto::VarType::FEED_MINIBATCH &&
var.type().type() != framework::proto::VarType::FETCH_LIST) { var.type().type() != framework::proto::VarType::FETCH_LIST) {
// std::cout << " to load " << var.name() << // std::cout << " to load " << var.name() <<
// std::endl; // std::endl;
...@@ -289,8 +286,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) { ...@@ -289,8 +286,7 @@ Loader<Dtype, P>::Load(const std::string &dirname) {
// 2 Lod information // 2 Lod information
uint64_t lod_level; uint64_t lod_level;
is.read(reinterpret_cast<char *>(&lod_level), is.read(reinterpret_cast<char *>(&lod_level), sizeof(lod_level));
sizeof(lod_level));
// std::cout << " load level: " << lod_level << // std::cout << " load level: " << lod_level <<
// std::endl; // std::endl;
// std::cout << " lod info: " << std::endl; // std::cout << " lod info: " << std::endl;
......
...@@ -29,10 +29,10 @@ namespace paddle_mobile { ...@@ -29,10 +29,10 @@ namespace paddle_mobile {
template <typename Dtype, Precision P = Precision::FP32> template <typename Dtype, Precision P = Precision::FP32>
class Loader : PaddleMobileObject { class Loader : PaddleMobileObject {
public: public:
const framework::Program<Dtype, P> Load(const std::string &dirname); const framework::Program<Dtype, P> Load(const std::string &dirname);
private: private:
void LoadVar(framework::LoDTensor *tensor, const std::string &file_path); void LoadVar(framework::LoDTensor *tensor, const std::string &file_path);
}; };
......
...@@ -40,7 +40,7 @@ void Free(void *ptr); ...@@ -40,7 +40,7 @@ void Free(void *ptr);
template <typename T> class PODDeleter { template <typename T> class PODDeleter {
static_assert(std::is_pod<T>::value, "T must be POD"); static_assert(std::is_pod<T>::value, "T must be POD");
public: public:
explicit PODDeleter(){}; explicit PODDeleter(){};
void operator()(T *ptr) { Free(static_cast<void *>(ptr)); } void operator()(T *ptr) { Free(static_cast<void *>(ptr)); }
...@@ -55,7 +55,7 @@ template <typename T> class PODDeleter { ...@@ -55,7 +55,7 @@ template <typename T> class PODDeleter {
* reinterpret_cast * reinterpret_cast
*/ */
template <typename T> class PlainDeleter { template <typename T> class PlainDeleter {
public: public:
explicit PlainDeleter(){}; explicit PlainDeleter(){};
void operator()(T *ptr) { Free(reinterpret_cast<void *>(ptr)); } void operator()(T *ptr) { Free(reinterpret_cast<void *>(ptr)); }
......
...@@ -27,13 +27,13 @@ using namespace framework; ...@@ -27,13 +27,13 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class BatchNormOp : public framework::OperatorWithKernel<DeviceType> { class BatchNormOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
BatchNormOp(const std::string &type, const VariableNameMap &inputs, BatchNormOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const VariableNameMap &outputs,
const framework::AttributeMap attrs, const framework::AttributeMap attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
void Run() const { void Run() const {
...@@ -44,7 +44,7 @@ class BatchNormOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -44,7 +44,7 @@ class BatchNormOp : public framework::OperatorWithKernel<DeviceType> {
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
protected: protected:
BatchNormParam param_; BatchNormParam param_;
}; };
......
...@@ -26,13 +26,12 @@ using namespace framework; ...@@ -26,13 +26,12 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ConcatOp : public framework::OperatorWithKernel<DeviceType> { class ConcatOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
ConcatOp(const std::string &type, const VariableNameMap &inputs, ConcatOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const VariableNameMap &outputs, const framework::AttributeMap attrs,
const framework::AttributeMap attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
void Run() const { void Run() const {
...@@ -43,7 +42,7 @@ class ConcatOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -43,7 +42,7 @@ class ConcatOp : public framework::OperatorWithKernel<DeviceType> {
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
protected: protected:
ConcatParam param_; ConcatParam param_;
}; };
......
...@@ -60,9 +60,9 @@ void ConvOp<Dtype, T>::InferShape() const { ...@@ -60,9 +60,9 @@ void ConvOp<Dtype, T>::InferShape() const {
std::vector<int64_t> output_shape({in_dims[0], filter_dims[0]}); std::vector<int64_t> output_shape({in_dims[0], filter_dims[0]});
for (size_t i = 0; i < strides.size(); ++i) { for (size_t i = 0; i < strides.size(); ++i) {
output_shape.push_back(ConvOutputSize(in_dims[i + 2], output_shape.push_back(ConvOutputSize(in_dims[i + 2], filter_dims[i + 2],
filter_dims[i + 2], dilations[i], dilations[i], paddings[i],
paddings[i], strides[i])); strides[i]));
} }
framework::DDim ddim = framework::make_ddim(output_shape); framework::DDim ddim = framework::make_ddim(output_shape);
......
...@@ -28,12 +28,12 @@ using namespace framework; ...@@ -28,12 +28,12 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ConvOp : public framework::OperatorWithKernel<DeviceType> { class ConvOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
ConvOp(const std::string &type, const VariableNameMap &inputs, ConvOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const framework::AttributeMap &attrs, const VariableNameMap &outputs, const framework::AttributeMap &attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
...@@ -45,7 +45,7 @@ class ConvOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -45,7 +45,7 @@ class ConvOp : public framework::OperatorWithKernel<DeviceType> {
this->ClearVariables({"Filter", "Input"}); this->ClearVariables({"Filter", "Input"});
} }
private: private:
ConvParam param_; ConvParam param_;
}; };
......
...@@ -27,13 +27,13 @@ using namespace framework; ...@@ -27,13 +27,13 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ElementwiseAddOp : public framework::OperatorWithKernel<DeviceType> { class ElementwiseAddOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
ElementwiseAddOp(const std::string &type, const VariableNameMap &inputs, ElementwiseAddOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const VariableNameMap &outputs,
const framework::AttributeMap attrs, const framework::AttributeMap attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
void Run() const { void Run() const {
...@@ -44,7 +44,7 @@ class ElementwiseAddOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -44,7 +44,7 @@ class ElementwiseAddOp : public framework::OperatorWithKernel<DeviceType> {
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
protected: protected:
ElementwiseAddParam param_; ElementwiseAddParam param_;
}; };
} // namespace operators } // namespace operators
......
...@@ -67,15 +67,14 @@ void BatchNormKernel<CPU, float>::Compute(const BatchNormParam &param) const { ...@@ -67,15 +67,14 @@ void BatchNormKernel<CPU, float>::Compute(const BatchNormParam &param) const {
/// (x * inv_var * scale) + (bias - est_mean * inv_var * scale) /// (x * inv_var * scale) + (bias - est_mean * inv_var * scale)
for (int i = 0; i < C; i++) { for (int i = 0; i < C; i++) {
new_scale_ptr[i] = inv_std_ptr[i] * scale_ptr[i]; new_scale_ptr[i] = inv_std_ptr[i] * scale_ptr[i];
new_bias_ptr[i] = new_bias_ptr[i] = bias_ptr[i] - mean_ptr[i] * inv_std_ptr[i] * scale_ptr[i];
bias_ptr[i] - mean_ptr[i] * inv_std_ptr[i] * scale_ptr[i];
{ {
for (int n = 0; n < N; n++) { for (int n = 0; n < N; n++) {
for (int h = 0; h < H; h++) { for (int h = 0; h < H; h++) {
for (int w = 0; w < W; w++) { for (int w = 0; w < W; w++) {
int index = n * stride0 + i * stride1 + h * stride2 + w; int index = n * stride0 + i * stride1 + h * stride2 + w;
out_ptr[index] = input_x_ptr[index] * new_scale_ptr[i] + out_ptr[index] =
new_bias_ptr[i]; input_x_ptr[index] * new_scale_ptr[i] + new_bias_ptr[i];
} }
} }
} }
......
...@@ -19,7 +19,7 @@ limitations under the License. */ ...@@ -19,7 +19,7 @@ limitations under the License. */
namespace paddle_mobile { namespace paddle_mobile {
namespace operators { namespace operators {
template <typename T> class ConcatFunctor { template <typename T> class ConcatFunctor {
public: public:
void operator()(const std::vector<framework::Tensor> &input, const int axis, void operator()(const std::vector<framework::Tensor> &input, const int axis,
framework::Tensor *output) { framework::Tensor *output) {
size_t num = input.size(); size_t num = input.size();
...@@ -80,8 +80,7 @@ void StridedNumelCopyWithAxis(int64_t axis, T *dst, ...@@ -80,8 +80,7 @@ void StridedNumelCopyWithAxis(int64_t axis, T *dst,
} }
for (int64_t i = 0; i < before; ++i) { for (int64_t i = 0; i < before; ++i) {
memory::Copy(dst + i * dst_after, src + i * src_after, memory::Copy(dst + i * dst_after, src + i * src_after, sizeof(T) * size);
sizeof(T) * size);
} }
} }
...@@ -98,9 +97,9 @@ void ConcatKernel<CPU, float>::Compute(const ConcatParam &param) const { ...@@ -98,9 +97,9 @@ void ConcatKernel<CPU, float>::Compute(const ConcatParam &param) const {
for (auto *in : inputs) { for (auto *in : inputs) {
auto in_stride = framework::stride_numel(in->dims()); auto in_stride = framework::stride_numel(in->dims());
auto out_stride = framework::stride_numel(out->dims()); auto out_stride = framework::stride_numel(out->dims());
StridedNumelCopyWithAxis<float>( StridedNumelCopyWithAxis<float>(axis, out->data<float>() + output_offset,
axis, out->data<float>() + output_offset, out_stride, out_stride, in->data<float>(), in_stride,
in->data<float>(), in_stride, in_stride[axis]); in_stride[axis]);
output_offset += in_stride[axis]; output_offset += in_stride[axis];
} }
} else { } else {
......
...@@ -138,12 +138,10 @@ template <> void ConvKernel<CPU, float>::Compute(const ConvParam &param) const { ...@@ -138,12 +138,10 @@ template <> void ConvKernel<CPU, float>::Compute(const ConvParam &param) const {
} }
// gemm // gemm
Tensor out_slice = Tensor out_slice = out_batch.Slice(g * out_step, (g + 1) * out_step);
out_batch.Slice(g * out_step, (g + 1) * out_step); Tensor filter_slice = filter.Slice(g * out_step, (g + 1) * out_step);
Tensor filter_slice = math::matmul<float>(filter_slice, false, col_matrix, false, float(1.0),
filter.Slice(g * out_step, (g + 1) * out_step); &out_slice, float(0.0));
math::matmul<float>(filter_slice, false, col_matrix, false,
float(1.0), &out_slice, float(0.0));
} }
} }
} }
......
...@@ -28,7 +28,7 @@ using namespace framework; ...@@ -28,7 +28,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class BatchNormKernel class BatchNormKernel
: public framework::OpKernelBase<DeviceType, BatchNormParam> { : public framework::OpKernelBase<DeviceType, BatchNormParam> {
public: public:
void Compute(const BatchNormParam &param) const; void Compute(const BatchNormParam &param) const;
}; };
......
...@@ -26,7 +26,7 @@ using namespace framework; ...@@ -26,7 +26,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ConcatKernel : public framework::OpKernelBase<DeviceType, ConcatParam> { class ConcatKernel : public framework::OpKernelBase<DeviceType, ConcatParam> {
public: public:
void Compute(const ConcatParam &param) const; void Compute(const ConcatParam &param) const;
}; };
......
...@@ -31,7 +31,7 @@ using namespace framework; ...@@ -31,7 +31,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ConvKernel : public framework::OpKernelBase<DeviceType, ConvParam> { class ConvKernel : public framework::OpKernelBase<DeviceType, ConvParam> {
public: public:
void Compute(const ConvParam &param) const; void Compute(const ConvParam &param) const;
}; };
} // namespace operators } // namespace operators
......
...@@ -29,7 +29,7 @@ using namespace framework; ...@@ -29,7 +29,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class ElementwiseAddKernel class ElementwiseAddKernel
: public framework::OpKernelBase<DeviceType, ElementwiseAddParam> { : public framework::OpKernelBase<DeviceType, ElementwiseAddParam> {
public: public:
void Compute(const ElementwiseAddParam &param) const; void Compute(const ElementwiseAddParam &param) const;
}; };
} // namespace operators } // namespace operators
......
...@@ -26,8 +26,8 @@ namespace operators { ...@@ -26,8 +26,8 @@ namespace operators {
using namespace framework; using namespace framework;
template <typename T> struct LRNFunctor { template <typename T> struct LRNFunctor {
void operator()(const framework::Tensor &input, framework::Tensor *out, void operator()(const framework::Tensor &input, framework::Tensor *out, int N,
int N, int C, int H, int W, int n, T k, T alpha, T beta) { int C, int H, int W, int n, T k, T alpha, T beta) {
auto input_ptr = input.data<T>(); auto input_ptr = input.data<T>();
const int start = -(n - 1) / 2; const int start = -(n - 1) / 2;
const int end = start + n; const int end = start + n;
...@@ -47,14 +47,11 @@ template <typename T> struct LRNFunctor { ...@@ -47,14 +47,11 @@ template <typename T> struct LRNFunctor {
if (channel >= 0 && channel < C) { if (channel >= 0 && channel < C) {
for (int c = 0; c < H; c++) { for (int c = 0; c < H; c++) {
for (int d = 0; d < W; d++) { for (int d = 0; d < W; d++) {
int u = int u = a * stride0 + b * stride1 + c * stride2 + d;
a * stride0 + b * stride1 + c * stride2 + d;
int i = a * stride0 + channel * stride1 + int i = a * stride0 + channel * stride1 + c * stride2 + d;
c * stride2 + d;
sqr_buffer_ptr[u] += sqr_buffer_ptr[u] += alpha * input_ptr[i] * input_ptr[i];
alpha * input_ptr[i] * input_ptr[i];
} }
} }
} }
...@@ -70,7 +67,7 @@ template <typename T> struct LRNFunctor { ...@@ -70,7 +67,7 @@ template <typename T> struct LRNFunctor {
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class LrnKernel : public framework::OpKernelBase<DeviceType, LrnParam> { class LrnKernel : public framework::OpKernelBase<DeviceType, LrnParam> {
public: public:
void Compute(const LrnParam &param) const; void Compute(const LrnParam &param) const;
}; };
} // namespace operators } // namespace operators
......
...@@ -28,7 +28,7 @@ using namespace framework; ...@@ -28,7 +28,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class MulKernel : public framework::OpKernelBase<DeviceType, MulParam> { class MulKernel : public framework::OpKernelBase<DeviceType, MulParam> {
public: public:
void Compute(const MulParam &param) const; void Compute(const MulParam &param) const;
}; };
} // namespace operators } // namespace operators
......
...@@ -28,7 +28,7 @@ using namespace framework; ...@@ -28,7 +28,7 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class PoolKernel : public framework::OpKernelBase<DeviceType, PoolParam> { class PoolKernel : public framework::OpKernelBase<DeviceType, PoolParam> {
public: public:
void Compute(const PoolParam &param) const; void Compute(const PoolParam &param) const;
}; };
} // namespace operators } // namespace operators
......
...@@ -27,12 +27,12 @@ using namespace framework; ...@@ -27,12 +27,12 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class LrnOp : public framework::OperatorWithKernel<DeviceType> { class LrnOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
LrnOp(const std::string &type, const VariableNameMap &inputs, LrnOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const framework::AttributeMap attrs, const VariableNameMap &outputs, const framework::AttributeMap attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
void Run() const { void Run() const {
...@@ -43,7 +43,7 @@ class LrnOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -43,7 +43,7 @@ class LrnOp : public framework::OperatorWithKernel<DeviceType> {
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
protected: protected:
LrnParam param_; LrnParam param_;
}; };
......
...@@ -69,7 +69,7 @@ inline void trim_trailing_singular_dims(framework::DDim *dims) { ...@@ -69,7 +69,7 @@ inline void trim_trailing_singular_dims(framework::DDim *dims) {
} }
template <typename T> class RowwiseTransformIterator { template <typename T> class RowwiseTransformIterator {
public: public:
RowwiseTransformIterator(const T *ptr, int n) : ptr_(ptr), i_(0), n_(n) {} RowwiseTransformIterator(const T *ptr, int n) : ptr_(ptr), i_(0), n_(n) {}
RowwiseTransformIterator<T> &operator++() { RowwiseTransformIterator<T> &operator++() {
...@@ -90,7 +90,7 @@ template <typename T> class RowwiseTransformIterator { ...@@ -90,7 +90,7 @@ template <typename T> class RowwiseTransformIterator {
const T &operator*() { return ptr_[i_]; } const T &operator*() { return ptr_[i_]; }
private: private:
const T *ptr_; const T *ptr_;
int i_; int i_;
int64_t n_; int64_t n_;
...@@ -101,7 +101,7 @@ template <typename T> class RowwiseTransformIterator { ...@@ -101,7 +101,7 @@ template <typename T> class RowwiseTransformIterator {
/// in (4,20,2) is 2 , /// in (4,20,2) is 2 ,
/// (20,1) move 1 stride , to fill(add) 2 element with the same number. /// (20,1) move 1 stride , to fill(add) 2 element with the same number.
template <typename T> class MidWiseTransformIterator { template <typename T> class MidWiseTransformIterator {
public: public:
MidWiseTransformIterator(const T *ptr, int n, int post) MidWiseTransformIterator(const T *ptr, int n, int post)
: ptr_(ptr), i_(0), j_(0), n_(n), post_(post) {} : ptr_(ptr), i_(0), j_(0), n_(n), post_(post) {}
...@@ -127,7 +127,7 @@ template <typename T> class MidWiseTransformIterator { ...@@ -127,7 +127,7 @@ template <typename T> class MidWiseTransformIterator {
const T &operator*() { return ptr_[i_]; } const T &operator*() { return ptr_[i_]; }
private: private:
const T *ptr_; const T *ptr_;
int64_t i_; int64_t i_;
int64_t j_; int64_t j_;
...@@ -137,7 +137,7 @@ template <typename T> class MidWiseTransformIterator { ...@@ -137,7 +137,7 @@ template <typename T> class MidWiseTransformIterator {
template <typename Functor, typename T, typename OutType = T> template <typename Functor, typename T, typename OutType = T>
class TransformFunctor { class TransformFunctor {
public: public:
TransformFunctor(const framework::Tensor *x, const framework::Tensor *y, TransformFunctor(const framework::Tensor *x, const framework::Tensor *y,
framework::Tensor *z, Functor func) framework::Tensor *z, Functor func)
: x_(x->data<T>()), y_(y->data<T>()), z_(z->mutable_data<OutType>()), : x_(x->data<T>()), y_(y->data<T>()), z_(z->mutable_data<OutType>()),
...@@ -156,11 +156,10 @@ class TransformFunctor { ...@@ -156,11 +156,10 @@ class TransformFunctor {
inline void RunMidWise(int n, int pre, int post) const { inline void RunMidWise(int n, int pre, int post) const {
math::Transform trans; math::Transform trans;
trans(x_, x_ + nx_, MidWiseTransformIterator<T>(y_, n, post), z_, trans(x_, x_ + nx_, MidWiseTransformIterator<T>(y_, n, post), z_, func_);
func_);
} }
private: private:
const T *x_; const T *x_;
const T *y_; const T *y_;
OutType *z_; OutType *z_;
......
...@@ -26,9 +26,8 @@ namespace math { ...@@ -26,9 +26,8 @@ namespace math {
* output_width] * output_width]
*/ */
template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> { template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> {
public: public:
void operator()(const framework::Tensor &im, void operator()(const framework::Tensor &im, const std::vector<int> &dilation,
const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
const std::vector<int> &padding, framework::Tensor *col) { const std::vector<int> &padding, framework::Tensor *col) {
// PADDLE_ENFORCE(im.dims().size() == 3); // PADDLE_ENFORCE(im.dims().size() == 3);
...@@ -72,17 +71,13 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> { ...@@ -72,17 +71,13 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> {
int h_offset = (c / filter_width) % filter_height; int h_offset = (c / filter_width) % filter_height;
int c_im = c / (filter_width * filter_height); int c_im = c / (filter_width * filter_height);
for (int h = 0; h < col_height; ++h) { for (int h = 0; h < col_height; ++h) {
int im_row_idx = int im_row_idx = h * stride[0] - padding[0] + h_offset * dilation[0];
h * stride[0] - padding[0] + h_offset * dilation[0];
for (int w = 0; w < col_width; ++w) { for (int w = 0; w < col_width; ++w) {
int im_col_idx = int im_col_idx = w * stride[1] - padding[1] + w_offset * dilation[1];
w * stride[1] - padding[1] + w_offset * dilation[1];
int col_idx = (c * col_height + h) * col_width + w; int col_idx = (c * col_height + h) * col_width + w;
int im_idx = int im_idx = (im_row_idx + c_im * im_height) * im_width + im_col_idx;
(im_row_idx + c_im * im_height) * im_width + im_col_idx;
col_data[col_idx] = col_data[col_idx] = (im_row_idx < 0 || im_row_idx >= im_height ||
(im_row_idx < 0 || im_row_idx >= im_height ||
im_col_idx < 0 || im_col_idx >= im_width) im_col_idx < 0 || im_col_idx >= im_width)
? static_cast<T>(0) ? static_cast<T>(0)
: im_data[im_idx]; : im_data[im_idx];
...@@ -99,7 +94,7 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> { ...@@ -99,7 +94,7 @@ template <class T> class Im2ColFunctor<ColFormat::kCFO, CPU, T> {
* output_width] * output_width]
*/ */
template <class T> class Col2ImFunctor<ColFormat::kCFO, CPU, T> { template <class T> class Col2ImFunctor<ColFormat::kCFO, CPU, T> {
public: public:
void operator()(const framework::Tensor &col, void operator()(const framework::Tensor &col,
const std::vector<int> &dilation, const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
...@@ -145,15 +140,12 @@ template <class T> class Col2ImFunctor<ColFormat::kCFO, CPU, T> { ...@@ -145,15 +140,12 @@ template <class T> class Col2ImFunctor<ColFormat::kCFO, CPU, T> {
int h_offset = (c / filter_width) % filter_height; int h_offset = (c / filter_width) % filter_height;
int c_im = c / (filter_width * filter_height); int c_im = c / (filter_width * filter_height);
for (int h = 0; h < col_height; ++h) { for (int h = 0; h < col_height; ++h) {
int im_row_idx = int im_row_idx = h * stride[0] - padding[0] + h_offset * dilation[0];
h * stride[0] - padding[0] + h_offset * dilation[0];
for (int w = 0; w < col_width; ++w) { for (int w = 0; w < col_width; ++w) {
int im_col_idx = int im_col_idx = w * stride[1] - padding[1] + w_offset * dilation[1];
w * stride[1] - padding[1] + w_offset * dilation[1];
if ((im_row_idx) >= 0 && (im_row_idx) < im_height && if ((im_row_idx) >= 0 && (im_row_idx) < im_height &&
(im_col_idx) >= 0 && (im_col_idx) < im_width) { (im_col_idx) >= 0 && (im_col_idx) < im_width) {
im_data[(im_row_idx + c_im * im_height) * im_width + im_data[(im_row_idx + c_im * im_height) * im_width + im_col_idx] +=
im_col_idx] +=
col_data[(c * col_height + h) * col_width + w]; col_data[(c * col_height + h) * col_width + w];
} }
} }
...@@ -174,9 +166,8 @@ template class Col2ImFunctor<ColFormat::kCFO, CPU, double>; ...@@ -174,9 +166,8 @@ template class Col2ImFunctor<ColFormat::kCFO, CPU, double>;
* filter_width] * filter_width]
*/ */
template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> { template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> {
public: public:
void operator()(const framework::Tensor &im, void operator()(const framework::Tensor &im, const std::vector<int> &dilation,
const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
const std::vector<int> &padding, framework::Tensor *col) { const std::vector<int> &padding, framework::Tensor *col) {
// PADDLE_ENFORCE(im.dims().size() == 3); // PADDLE_ENFORCE(im.dims().size() == 3);
...@@ -210,29 +201,25 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> { ...@@ -210,29 +201,25 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> {
for (int channel = 0; channel < im_channels; ++channel) { for (int channel = 0; channel < im_channels; ++channel) {
for (int filter_row_idx = 0; filter_row_idx < filter_height; for (int filter_row_idx = 0; filter_row_idx < filter_height;
++filter_row_idx) { ++filter_row_idx) {
int im_row_offset = col_row_idx * stride[0] + int im_row_offset =
filter_row_idx - padding[0]; col_row_idx * stride[0] + filter_row_idx - padding[0];
for (int filter_col_idx = 0; for (int filter_col_idx = 0; filter_col_idx < filter_width;
filter_col_idx < filter_width; ++filter_col_idx) { ++filter_col_idx) {
int im_col_offset = col_col_idx * stride[1] + int im_col_offset =
filter_col_idx - padding[1]; col_col_idx * stride[1] + filter_col_idx - padding[1];
int col_offset = int col_offset =
((((col_row_idx)*col_width + col_col_idx) * ((((col_row_idx)*col_width + col_col_idx) * im_channels +
im_channels +
channel) * channel) *
filter_height + filter_height +
filter_row_idx) * filter_row_idx) *
filter_width + filter_width +
filter_col_idx; filter_col_idx;
int im_offset = int im_offset = (channel * im_height + im_row_offset) * im_width +
(channel * im_height + im_row_offset) *
im_width +
im_col_offset; im_col_offset;
col_data[col_offset] = col_data[col_offset] =
(im_row_offset < 0 || (im_row_offset < 0 || im_row_offset >= im_height ||
im_row_offset >= im_height ||
im_col_offset < 0 || im_col_offset >= im_width) im_col_offset < 0 || im_col_offset >= im_width)
? static_cast<T>(0) ? static_cast<T>(0)
: im_data[im_offset]; : im_data[im_offset];
...@@ -251,7 +238,7 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> { ...@@ -251,7 +238,7 @@ template <class T> class Im2ColFunctor<ColFormat::kOCF, CPU, T> {
* filter_width] * filter_width]
*/ */
template <class T> class Col2ImFunctor<ColFormat::kOCF, CPU, T> { template <class T> class Col2ImFunctor<ColFormat::kOCF, CPU, T> {
public: public:
void operator()(const framework::Tensor &col, void operator()(const framework::Tensor &col,
const std::vector<int> &dilation, const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
...@@ -287,29 +274,25 @@ template <class T> class Col2ImFunctor<ColFormat::kOCF, CPU, T> { ...@@ -287,29 +274,25 @@ template <class T> class Col2ImFunctor<ColFormat::kOCF, CPU, T> {
for (int channel = 0; channel < im_channels; ++channel) { for (int channel = 0; channel < im_channels; ++channel) {
for (int filter_row_idx = 0; filter_row_idx < filter_height; for (int filter_row_idx = 0; filter_row_idx < filter_height;
++filter_row_idx) { ++filter_row_idx) {
int im_row_offset = col_row_idx * stride[0] + int im_row_offset =
filter_row_idx - padding[0]; col_row_idx * stride[0] + filter_row_idx - padding[0];
for (int filter_col_idx = 0; for (int filter_col_idx = 0; filter_col_idx < filter_width;
filter_col_idx < filter_width; ++filter_col_idx) { ++filter_col_idx) {
int im_col_offset = col_col_idx * stride[1] + int im_col_offset =
filter_col_idx - padding[1]; col_col_idx * stride[1] + filter_col_idx - padding[1];
int col_offset = int col_offset =
(((col_row_idx * col_width + col_col_idx) * (((col_row_idx * col_width + col_col_idx) * im_channels +
im_channels +
channel) * channel) *
filter_height + filter_height +
filter_row_idx) * filter_row_idx) *
filter_width + filter_width +
filter_col_idx; filter_col_idx;
if (im_row_offset >= 0 && if (im_row_offset >= 0 && im_row_offset < im_height &&
im_row_offset < im_height && im_col_offset >= 0 && im_col_offset < im_width) {
im_col_offset >= 0 &&
im_col_offset < im_width) {
int im_offset = int im_offset =
(channel * im_height + im_row_offset) * (channel * im_height + im_row_offset) * im_width +
im_width +
im_col_offset; im_col_offset;
im_data[im_offset] += col_data[col_offset]; im_data[im_offset] += col_data[col_offset];
} }
......
...@@ -89,16 +89,15 @@ enum class ColFormat { kCFO = 0, kOCF = 1 }; ...@@ -89,16 +89,15 @@ enum class ColFormat { kCFO = 0, kOCF = 1 };
*/ */
template <ColFormat Format, typename DeviceType, typename T> template <ColFormat Format, typename DeviceType, typename T>
class Im2ColFunctor { class Im2ColFunctor {
public: public:
void operator()(const framework::Tensor &im, void operator()(const framework::Tensor &im, const std::vector<int> &dilation,
const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
const std::vector<int> &padding, framework::Tensor *col); const std::vector<int> &padding, framework::Tensor *col);
}; };
template <ColFormat Format, typename DeviceType, typename T> template <ColFormat Format, typename DeviceType, typename T>
class Col2ImFunctor { class Col2ImFunctor {
public: public:
void operator()(const framework::Tensor &col, void operator()(const framework::Tensor &col,
const std::vector<int> &dilation, const std::vector<int> &dilation,
const std::vector<int> &stride, const std::vector<int> &stride,
......
...@@ -30,9 +30,8 @@ namespace math { ...@@ -30,9 +30,8 @@ namespace math {
*/ */
template <typename PoolProcess, typename T> template <typename PoolProcess, typename T>
class PoolFunctor<CPU, PoolProcess, T> { class PoolFunctor<CPU, PoolProcess, T> {
public: public:
void operator()(const framework::Tensor &input, void operator()(const framework::Tensor &input, const std::vector<int> &ksize,
const std::vector<int> &ksize,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, PoolProcess pool_process, const std::vector<int> &paddings, PoolProcess pool_process,
framework::Tensor *output) { framework::Tensor *output) {
...@@ -77,8 +76,7 @@ class PoolFunctor<CPU, PoolProcess, T> { ...@@ -77,8 +76,7 @@ class PoolFunctor<CPU, PoolProcess, T> {
T ele = pool_process.initial(); T ele = pool_process.initial();
for (int h = hstart; h < hend; ++h) { for (int h = hstart; h < hend; ++h) {
for (int w = wstart; w < wend; ++w) { for (int w = wstart; w < wend; ++w) {
pool_process.compute( pool_process.compute(input_data[h * input_width + w], &ele);
input_data[h * input_width + w], &ele);
} }
} }
int pool_size = (hend - hstart) * (wend - wstart); int pool_size = (hend - hstart) * (wend - wstart);
......
...@@ -38,7 +38,7 @@ namespace math { ...@@ -38,7 +38,7 @@ namespace math {
* MaxPoolGrad and AvgPoolGrad are gradient operations respectively. * MaxPoolGrad and AvgPoolGrad are gradient operations respectively.
*/ */
template <class T> class MaxPool { template <class T> class MaxPool {
public: public:
inline T initial() { return static_cast<T>(-FLT_MAX); } inline T initial() { return static_cast<T>(-FLT_MAX); }
inline void compute(const T &x, T *y) { *y = *y > x ? *y : x; } inline void compute(const T &x, T *y) { *y = *y > x ? *y : x; }
...@@ -47,7 +47,7 @@ template <class T> class MaxPool { ...@@ -47,7 +47,7 @@ template <class T> class MaxPool {
}; };
template <class T> class AvgPool { template <class T> class AvgPool {
public: public:
inline T initial() { return static_cast<T>(0); } inline T initial() { return static_cast<T>(0); }
inline void compute(const T &x, T *y) { *y += x; } inline void compute(const T &x, T *y) { *y += x; }
...@@ -57,9 +57,8 @@ template <class T> class AvgPool { ...@@ -57,9 +57,8 @@ template <class T> class AvgPool {
template <typename DeviceType, typename PoolProcess, typename T> template <typename DeviceType, typename PoolProcess, typename T>
class PoolFunctor { class PoolFunctor {
public: public:
void operator()(const framework::Tensor &input, void operator()(const framework::Tensor &input, const std::vector<int> &ksize,
const std::vector<int> &ksize,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, PoolProcess pool_compute, const std::vector<int> &paddings, PoolProcess pool_compute,
framework::Tensor *output); framework::Tensor *output);
......
...@@ -26,7 +26,7 @@ using Tensor = paddle_mobile::framework::Tensor; ...@@ -26,7 +26,7 @@ using Tensor = paddle_mobile::framework::Tensor;
* output_depth, output_height, output_width] * output_depth, output_height, output_width]
*/ */
template <typename T> class Vol2ColFunctor<CPU, T> { template <typename T> class Vol2ColFunctor<CPU, T> {
public: public:
void operator()(const Tensor &vol, const std::vector<int> &dilations, void operator()(const Tensor &vol, const std::vector<int> &dilations,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, Tensor *col) const { const std::vector<int> &paddings, Tensor *col) const {
...@@ -81,28 +81,21 @@ template <typename T> class Vol2ColFunctor<CPU, T> { ...@@ -81,28 +81,21 @@ template <typename T> class Vol2ColFunctor<CPU, T> {
int d_offset = (c / filter_width / filter_height) % filter_depth; int d_offset = (c / filter_width / filter_height) % filter_depth;
int c_in = c / filter_width / filter_height / filter_depth; int c_in = c / filter_width / filter_height / filter_depth;
for (int d = 0; d < output_depth; ++d) { for (int d = 0; d < output_depth; ++d) {
int d_pad = int d_pad = d * strides[0] - paddings[0] + d_offset * dilations[0];
d * strides[0] - paddings[0] + d_offset * dilations[0];
for (int h = 0; h < output_height; ++h) { for (int h = 0; h < output_height; ++h) {
int h_pad = int h_pad = h * strides[1] - paddings[1] + h_offset * dilations[1];
h * strides[1] - paddings[1] + h_offset * dilations[1];
for (int w = 0; w < output_width; ++w) { for (int w = 0; w < output_width; ++w) {
int w_pad = w * strides[2] - paddings[2] + int w_pad = w * strides[2] - paddings[2] + w_offset * dilations[2];
w_offset * dilations[2];
int col_idx = int col_idx =
((c * output_depth + d) * output_height + h) * ((c * output_depth + d) * output_height + h) * output_width + w;
output_width +
w;
int vol_idx = int vol_idx =
((c_in * input_depth + d_pad) * input_height + ((c_in * input_depth + d_pad) * input_height + h_pad) *
h_pad) *
input_width + input_width +
w_pad; w_pad;
col_data[col_idx] = col_data[col_idx] =
(h_pad < 0 || h_pad >= input_height || w_pad < 0 || (h_pad < 0 || h_pad >= input_height || w_pad < 0 ||
w_pad >= input_width || d_pad < 0 || w_pad >= input_width || d_pad < 0 || d_pad >= input_depth)
d_pad >= input_depth)
? static_cast<T>(0) ? static_cast<T>(0)
: vol_data[vol_idx]; : vol_data[vol_idx];
} }
...@@ -119,7 +112,7 @@ template <typename T> class Vol2ColFunctor<CPU, T> { ...@@ -119,7 +112,7 @@ template <typename T> class Vol2ColFunctor<CPU, T> {
* output_depth, output_height, output_width] * output_depth, output_height, output_width]
*/ */
template <typename T> class Col2VolFunctor<CPU, T> { template <typename T> class Col2VolFunctor<CPU, T> {
public: public:
void operator()(const Tensor &col, const std::vector<int> &dilations, void operator()(const Tensor &col, const std::vector<int> &dilations,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, Tensor *vol) const { const std::vector<int> &paddings, Tensor *vol) const {
...@@ -173,27 +166,21 @@ template <typename T> class Col2VolFunctor<CPU, T> { ...@@ -173,27 +166,21 @@ template <typename T> class Col2VolFunctor<CPU, T> {
int d_offset = (c / filter_width / filter_height) % filter_depth; int d_offset = (c / filter_width / filter_height) % filter_depth;
int cIm = c / filter_width / filter_height / filter_depth; int cIm = c / filter_width / filter_height / filter_depth;
for (int d = 0; d < output_depth; ++d) { for (int d = 0; d < output_depth; ++d) {
int d_pad = int d_pad = d * strides[0] - paddings[0] + d_offset * dilations[0];
d * strides[0] - paddings[0] + d_offset * dilations[0];
for (int h = 0; h < output_height; ++h) { for (int h = 0; h < output_height; ++h) {
int h_pad = int h_pad = h * strides[1] - paddings[1] + h_offset * dilations[1];
h * strides[1] - paddings[1] + h_offset * dilations[1];
for (int w = 0; w < output_width; ++w) { for (int w = 0; w < output_width; ++w) {
int w_pad = w * strides[2] - paddings[2] + int w_pad = w * strides[2] - paddings[2] + w_offset * dilations[2];
w_offset * dilations[2];
if (h_pad >= 0 && h_pad < input_height && w_pad >= 0 && if (h_pad >= 0 && h_pad < input_height && w_pad >= 0 &&
w_pad < input_width && d_pad >= 0 && w_pad < input_width && d_pad >= 0 && d_pad < input_depth) {
d_pad < input_depth) {
int vol_idx = int vol_idx =
((cIm * input_depth + d_pad) * input_height + ((cIm * input_depth + d_pad) * input_height + h_pad) *
h_pad) *
input_width + input_width +
w_pad; w_pad;
int col_idx = int col_idx =
((c * output_depth + d) * output_height + h) * ((c * output_depth + d) * output_height + h) * output_width +
output_width +
w; w;
vol_data[vol_idx] += col_data[col_idx]; vol_data[vol_idx] += col_data[col_idx];
} }
......
...@@ -73,14 +73,14 @@ namespace math { ...@@ -73,14 +73,14 @@ namespace math {
using Tensor = paddle_mobile::framework::Tensor; using Tensor = paddle_mobile::framework::Tensor;
template <typename DeviceType, typename T> class Vol2ColFunctor { template <typename DeviceType, typename T> class Vol2ColFunctor {
public: public:
void operator()(const Tensor &vol, const std::vector<int> &dilations, void operator()(const Tensor &vol, const std::vector<int> &dilations,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, Tensor *col) const; const std::vector<int> &paddings, Tensor *col) const;
}; };
template <typename DeviceType, typename T> class Col2VolFunctor { template <typename DeviceType, typename T> class Col2VolFunctor {
public: public:
void operator()(const Tensor &col, const std::vector<int> &dilations, void operator()(const Tensor &col, const std::vector<int> &dilations,
const std::vector<int> &strides, const std::vector<int> &strides,
const std::vector<int> &paddings, Tensor *vol) const; const std::vector<int> &paddings, Tensor *vol) const;
......
...@@ -27,12 +27,12 @@ using namespace framework; ...@@ -27,12 +27,12 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class MulOp : public framework::OperatorWithKernel<DeviceType> { class MulOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
MulOp(const std::string &type, const VariableNameMap &inputs, MulOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const framework::AttributeMap attrs, const VariableNameMap &outputs, const framework::AttributeMap attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
void Run() const { void Run() const {
...@@ -43,7 +43,7 @@ class MulOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -43,7 +43,7 @@ class MulOp : public framework::OperatorWithKernel<DeviceType> {
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
protected: protected:
MulParam param_; MulParam param_;
}; };
......
...@@ -24,8 +24,7 @@ Print &operator<<(Print &printer, const ConvParam &conv_param) { ...@@ -24,8 +24,7 @@ Print &operator<<(Print &printer, const ConvParam &conv_param) {
printer << "parameter of conv: " printer << "parameter of conv: "
<< "\n"; << "\n";
printer << " stride: " printer << " stride: "
<< " (" << conv_param.Strides()[0] << conv_param.Strides()[1] << " (" << conv_param.Strides()[0] << conv_param.Strides()[1] << ") "
<< ") "
<< "\n"; << "\n";
printer << " paddings: " printer << " paddings: "
<< " (" << conv_param.Paddings()[0] << conv_param.Paddings()[1] << " (" << conv_param.Paddings()[0] << conv_param.Paddings()[1]
......
...@@ -31,8 +31,8 @@ namespace operators { ...@@ -31,8 +31,8 @@ namespace operators {
using namespace framework; using namespace framework;
class OpParam : PaddleMobileObject { class OpParam : PaddleMobileObject {
public: public:
protected: protected:
template <typename T> template <typename T>
static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) { static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
return GetVarValue<T>("Input", inputs, scope); return GetVarValue<T>("Input", inputs, scope);
...@@ -62,8 +62,7 @@ class OpParam : PaddleMobileObject { ...@@ -62,8 +62,7 @@ class OpParam : PaddleMobileObject {
return GetVarValue<T>("Mean", inputs, scope); return GetVarValue<T>("Mean", inputs, scope);
} }
template <typename T> template <typename T>
static T *InputScaleFrom(const VariableNameMap &inputs, static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
const Scope &scope) {
return GetVarValue<T>("Scale", inputs, scope); return GetVarValue<T>("Scale", inputs, scope);
} }
...@@ -104,8 +103,8 @@ class OpParam : PaddleMobileObject { ...@@ -104,8 +103,8 @@ class OpParam : PaddleMobileObject {
} }
template <typename T> template <typename T>
static T *GetVarValue(const std::string &key, static T *GetVarValue(const std::string &key, const VariableNameMap &var_map,
const VariableNameMap &var_map, const Scope &scope) { const Scope &scope) {
auto var_vec = var_map.at(key); auto var_vec = var_map.at(key);
if (!var_vec.empty()) { if (!var_vec.empty()) {
// std::cout << " get var value -- " << var_vec[0] << // std::cout << " get var value -- " << var_vec[0] <<
...@@ -133,7 +132,7 @@ class OpParam : PaddleMobileObject { ...@@ -133,7 +132,7 @@ class OpParam : PaddleMobileObject {
}; };
class ConvParam : OpParam { class ConvParam : OpParam {
public: public:
ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs, ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
...@@ -160,7 +159,7 @@ class ConvParam : OpParam { ...@@ -160,7 +159,7 @@ class ConvParam : OpParam {
const int &Groups() const { return groups; } const int &Groups() const { return groups; }
private: private:
Tensor *input_; Tensor *input_;
Tensor *output_; Tensor *output_;
LoDTensor *filter_; LoDTensor *filter_;
...@@ -173,7 +172,7 @@ class ConvParam : OpParam { ...@@ -173,7 +172,7 @@ class ConvParam : OpParam {
Print &operator<<(Print &printer, const ConvParam &conv_param); Print &operator<<(Print &printer, const ConvParam &conv_param);
class ElementwiseAddParam : OpParam { class ElementwiseAddParam : OpParam {
public: public:
ElementwiseAddParam(const VariableNameMap &inputs, ElementwiseAddParam(const VariableNameMap &inputs,
const VariableNameMap &outputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
...@@ -192,7 +191,7 @@ class ElementwiseAddParam : OpParam { ...@@ -192,7 +191,7 @@ class ElementwiseAddParam : OpParam {
const int &Axis() const { return axis_; } const int &Axis() const { return axis_; }
private: private:
Tensor *input_x_; Tensor *input_x_;
Tensor *input_y_; Tensor *input_y_;
Tensor *out_; Tensor *out_;
...@@ -200,7 +199,7 @@ class ElementwiseAddParam : OpParam { ...@@ -200,7 +199,7 @@ class ElementwiseAddParam : OpParam {
}; };
class MulParam : OpParam { class MulParam : OpParam {
public: public:
MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs, MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
...@@ -221,7 +220,7 @@ class MulParam : OpParam { ...@@ -221,7 +220,7 @@ class MulParam : OpParam {
const int &YNumColDims() const { return y_num_col_dims_; } const int &YNumColDims() const { return y_num_col_dims_; }
private: private:
Tensor *input_x_; Tensor *input_x_;
Tensor *input_y_; Tensor *input_y_;
Tensor *out_; Tensor *out_;
...@@ -230,7 +229,7 @@ class MulParam : OpParam { ...@@ -230,7 +229,7 @@ class MulParam : OpParam {
}; };
class ConcatParam : public OpParam { class ConcatParam : public OpParam {
public: public:
ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs, ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
...@@ -245,14 +244,14 @@ class ConcatParam : public OpParam { ...@@ -245,14 +244,14 @@ class ConcatParam : public OpParam {
const int &Axis() const { return axis_; } const int &Axis() const { return axis_; }
private: private:
std::vector<Tensor *> inputs_; std::vector<Tensor *> inputs_;
Tensor *out_; Tensor *out_;
int axis_; int axis_;
}; };
class LrnParam : public OpParam { class LrnParam : public OpParam {
public: public:
LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs, LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
...@@ -282,7 +281,7 @@ class LrnParam : public OpParam { ...@@ -282,7 +281,7 @@ class LrnParam : public OpParam {
const std::string &DataFormat() const { return data_format_; } const std::string &DataFormat() const { return data_format_; }
private: private:
Tensor *input_x_; Tensor *input_x_;
Tensor *out_; Tensor *out_;
Tensor *mid_out_; Tensor *mid_out_;
...@@ -293,9 +292,8 @@ class LrnParam : public OpParam { ...@@ -293,9 +292,8 @@ class LrnParam : public OpParam {
std::string data_format_; std::string data_format_;
}; };
class BatchNormParam : OpParam { class BatchNormParam : OpParam {
public: public:
BatchNormParam(const VariableNameMap &inputs, BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
input_x_ = InputXFrom<framework::Tensor>(inputs, scope); input_x_ = InputXFrom<framework::Tensor>(inputs, scope);
...@@ -329,7 +327,7 @@ class BatchNormParam : OpParam { ...@@ -329,7 +327,7 @@ class BatchNormParam : OpParam {
const std::string &DataFormat() const { return data_format_; } const std::string &DataFormat() const { return data_format_; }
private: private:
Tensor *input_x_; Tensor *input_x_;
Tensor *output_y_; Tensor *output_y_;
Tensor *input_bias_; Tensor *input_bias_;
...@@ -342,7 +340,7 @@ class BatchNormParam : OpParam { ...@@ -342,7 +340,7 @@ class BatchNormParam : OpParam {
std::string data_format_; std::string data_format_;
}; };
class PoolParam : public OpParam { class PoolParam : public OpParam {
public: public:
PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs, PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
const framework::AttributeMap &attrs, const framework::AttributeMap &attrs,
const framework::Scope &scope) { const framework::Scope &scope) {
...@@ -373,7 +371,7 @@ class PoolParam : public OpParam { ...@@ -373,7 +371,7 @@ class PoolParam : public OpParam {
bool isGlobalPooling() const { return gloabal_pooling_; } bool isGlobalPooling() const { return gloabal_pooling_; }
private: private:
Tensor *input_; Tensor *input_;
Tensor *output_; Tensor *output_;
std::string pooling_type_; std::string pooling_type_;
......
...@@ -49,8 +49,8 @@ void PoolOp<DeviceType, T>::InferShape() const { ...@@ -49,8 +49,8 @@ void PoolOp<DeviceType, T>::InferShape() const {
} }
std::vector<int64_t> output_shape({in_x_dims[0], in_x_dims[1]}); std::vector<int64_t> output_shape({in_x_dims[0], in_x_dims[1]});
for (size_t i = 0; i < ksize.size(); ++i) { for (size_t i = 0; i < ksize.size(); ++i) {
output_shape.push_back(PoolOutputSize( output_shape.push_back(PoolOutputSize(in_x_dims[i + 2], ksize[i],
in_x_dims[i + 2], ksize[i], paddings[i], strides[i], ceil_mode)); paddings[i], strides[i], ceil_mode));
} }
param_.Output()->Resize(framework::make_ddim(output_shape)); param_.Output()->Resize(framework::make_ddim(output_shape));
DLOG << "infer shape out size =" << param_.Output()->numel(); DLOG << "infer shape out size =" << param_.Output()->numel();
......
...@@ -28,12 +28,12 @@ using namespace framework; ...@@ -28,12 +28,12 @@ using namespace framework;
template <typename DeviceType, typename T> template <typename DeviceType, typename T>
class PoolOp : public framework::OperatorWithKernel<DeviceType> { class PoolOp : public framework::OperatorWithKernel<DeviceType> {
public: public:
PoolOp(const std::string &type, const VariableNameMap &inputs, PoolOp(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const framework::AttributeMap &attrs, const VariableNameMap &outputs, const framework::AttributeMap &attrs,
std::shared_ptr<framework::Scope> scope) std::shared_ptr<framework::Scope> scope)
: framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
attrs, scope), scope),
param_(inputs, outputs, attrs, *scope) {} param_(inputs, outputs, attrs, *scope) {}
using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel; using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
void InferShape() const override; void InferShape() const override;
...@@ -45,7 +45,7 @@ class PoolOp : public framework::OperatorWithKernel<DeviceType> { ...@@ -45,7 +45,7 @@ class PoolOp : public framework::OperatorWithKernel<DeviceType> {
this->ClearVariables({"X"}); this->ClearVariables({"X"});
} }
private: private:
PoolParam param_; PoolParam param_;
}; };
} // namespace operators } // namespace operators
......
...@@ -17,7 +17,7 @@ limitations under the License. */ ...@@ -17,7 +17,7 @@ limitations under the License. */
// Disable the copy and assignment operator for a class. // Disable the copy and assignment operator for a class.
#ifndef DISABLE_COPY_AND_ASSIGN #ifndef DISABLE_COPY_AND_ASSIGN
#define DISABLE_COPY_AND_ASSIGN(classname) \ #define DISABLE_COPY_AND_ASSIGN(classname) \
private: \ private: \
classname(const classname &) = delete; \ classname(const classname &) = delete; \
classname(classname &&) = delete; \ classname(classname &&) = delete; \
classname &operator=(const classname &) = delete; \ classname &operator=(const classname &) = delete; \
......
...@@ -38,8 +38,8 @@ Executor4Test<DeviceType, OpType>::Executor4Test(const Program<DeviceType> p, ...@@ -38,8 +38,8 @@ Executor4Test<DeviceType, OpType>::Executor4Test(const Program<DeviceType> p,
std::shared_ptr<OpDesc> op = ops[j]; std::shared_ptr<OpDesc> op = ops[j];
if (op->Type() == op_type) { if (op->Type() == op_type) {
std::shared_ptr<OpType> op_ptr = std::make_shared<OpType>( std::shared_ptr<OpType> op_ptr = std::make_shared<OpType>(
op->Type(), op->GetInputs(), op->GetOutputs(), op->Type(), op->GetInputs(), op->GetOutputs(), op->GetAttrMap(),
op->GetAttrMap(), this->program_.scope); this->program_.scope);
this->ops_of_block_[*block_desc.get()].push_back(op_ptr); this->ops_of_block_[*block_desc.get()].push_back(op_ptr);
break; break;
......
...@@ -27,7 +27,7 @@ using namespace paddle_mobile::framework; ...@@ -27,7 +27,7 @@ using namespace paddle_mobile::framework;
template <typename DeviceType, typename OpType> template <typename DeviceType, typename OpType>
class Executor4Test : public Executor<DeviceType> { class Executor4Test : public Executor<DeviceType> {
public: public:
Executor4Test(const Program<DeviceType> p, std::string op_type); Executor4Test(const Program<DeviceType> p, std::string op_type);
std::shared_ptr<Tensor> predict(Tensor &t, std::string input, std::shared_ptr<Tensor> predict(Tensor &t, std::string input,
......
...@@ -24,7 +24,7 @@ namespace paddle_mobile { ...@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class TestBatchNormOp { template <typename Dtype> class TestBatchNormOp {
public: public:
explicit TestBatchNormOp(const Program<Dtype> p) : program_(p) { explicit TestBatchNormOp(const Program<Dtype> p) : program_(p) {
if (use_optimize_) { if (use_optimize_) {
to_predict_program_ = program_.optimizeProgram; to_predict_program_ = program_.optimizeProgram;
...@@ -52,8 +52,7 @@ template <typename Dtype> class TestBatchNormOp { ...@@ -52,8 +52,7 @@ template <typename Dtype> class TestBatchNormOp {
DLOG << " Input Scale is : " << op->Input("Scale")[0]; DLOG << " Input Scale is : " << op->Input("Scale")[0];
DLOG << " Input Bias is : " << op->Input("Bias")[0]; DLOG << " Input Bias is : " << op->Input("Bias")[0];
DLOG << " Output Y is : " << op->Output("Y")[0]; DLOG << " Output Y is : " << op->Output("Y")[0];
DLOG << " epsilon : " DLOG << " epsilon : " << op->GetAttrMap().at("epsilon").Get<float>();
<< op->GetAttrMap().at("epsilon").Get<float>();
std::shared_ptr<operators::BatchNormOp<Dtype, float>> lrn = std::shared_ptr<operators::BatchNormOp<Dtype, float>> lrn =
std::make_shared<operators::BatchNormOp<Dtype, float>>( std::make_shared<operators::BatchNormOp<Dtype, float>>(
op->Type(), op->GetInputs(), op->GetOutputs(), op->Type(), op->GetInputs(), op->GetOutputs(),
...@@ -101,7 +100,7 @@ template <typename Dtype> class TestBatchNormOp { ...@@ -101,7 +100,7 @@ template <typename Dtype> class TestBatchNormOp {
return out_tensor; return out_tensor;
} }
private: private:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
std::map<framework::BlockDesc, std::map<framework::BlockDesc,
...@@ -113,8 +112,7 @@ template <typename Dtype> class TestBatchNormOp { ...@@ -113,8 +112,7 @@ template <typename Dtype> class TestBatchNormOp {
const Tensor &t4, const Tensor &t5, int block_id) { const Tensor &t4, const Tensor &t5, int block_id) {
std::shared_ptr<BlockDesc> to_predict_block = std::shared_ptr<BlockDesc> to_predict_block =
to_predict_program_->Block(block_id); to_predict_program_->Block(block_id);
for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); ++j) {
++j) {
auto op = ops_of_block_[*to_predict_block.get()][j]; auto op = ops_of_block_[*to_predict_block.get()][j];
DLOG << "op -> run()"; DLOG << "op -> run()";
op->Run(); op->Run();
...@@ -140,8 +138,7 @@ int main() { ...@@ -140,8 +138,7 @@ int main() {
auto *inputx1_ptr = inputx1.data<float>(); auto *inputx1_ptr = inputx1.data<float>();
paddle_mobile::framework::Tensor mean; paddle_mobile::framework::Tensor mean;
SetupTensor<float>(&mean, {10}, static_cast<float>(0), SetupTensor<float>(&mean, {10}, static_cast<float>(0), static_cast<float>(1));
static_cast<float>(1));
auto *mean_ptr = mean.data<float>(); auto *mean_ptr = mean.data<float>();
paddle_mobile::framework::Tensor scale; paddle_mobile::framework::Tensor scale;
...@@ -155,12 +152,11 @@ int main() { ...@@ -155,12 +152,11 @@ int main() {
auto *variance_ptr = variance.data<float>(); auto *variance_ptr = variance.data<float>();
paddle_mobile::framework::Tensor bias; paddle_mobile::framework::Tensor bias;
SetupTensor<float>(&bias, {10}, static_cast<float>(0), SetupTensor<float>(&bias, {10}, static_cast<float>(0), static_cast<float>(1));
static_cast<float>(1));
auto *bias_ptr = bias.data<float>(); auto *bias_ptr = bias.data<float>();
paddle_mobile::framework::TestBatchNormOp<paddle_mobile::CPU> paddle_mobile::framework::TestBatchNormOp<paddle_mobile::CPU> testBatchNormOp(
testBatchNormOp(program); program);
auto output_bn = auto output_bn =
testBatchNormOp.predict_bn(inputx1, mean, scale, variance, bias); testBatchNormOp.predict_bn(inputx1, mean, scale, variance, bias);
......
...@@ -24,7 +24,7 @@ namespace paddle_mobile { ...@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class TestConcatOp { template <typename Dtype> class TestConcatOp {
public: public:
explicit TestConcatOp(const Program<Dtype> p) : program_(p) { explicit TestConcatOp(const Program<Dtype> p) : program_(p) {
if (use_optimize_) { if (use_optimize_) {
to_predict_program_ = program_.optimizeProgram; to_predict_program_ = program_.optimizeProgram;
...@@ -41,15 +41,13 @@ template <typename Dtype> class TestConcatOp { ...@@ -41,15 +41,13 @@ template <typename Dtype> class TestConcatOp {
// DLOG << " ops " << ops.size(); // DLOG << " ops " << ops.size();
for (int j = 0; j < ops.size(); ++j) { for (int j = 0; j < ops.size(); ++j) {
std::shared_ptr<OpDesc> op = ops[j]; std::shared_ptr<OpDesc> op = ops[j];
if (op->Type() == "concat" && if (op->Type() == "concat" && op->Input("X")[0] == "conv2d_3.tmp_1") {
op->Input("X")[0] == "conv2d_3.tmp_1") {
DLOG << " mul attr size: " << op->GetAttrMap().size(); DLOG << " mul attr size: " << op->GetAttrMap().size();
DLOG << " inputs size: " << op->GetInputs().size(); DLOG << " inputs size: " << op->GetInputs().size();
DLOG << " outputs size: " << op->GetOutputs().size(); DLOG << " outputs size: " << op->GetOutputs().size();
DLOG << " Input X is : " << op->Input("X")[0]; DLOG << " Input X is : " << op->Input("X")[0];
DLOG << " Output Out is : " << op->Output("Out")[0]; DLOG << " Output Out is : " << op->Output("Out")[0];
DLOG << " axis : " DLOG << " axis : " << op->GetAttrMap().at("axis").Get<int>();
<< op->GetAttrMap().at("axis").Get<int>();
std::shared_ptr<operators::ConcatOp<Dtype, float>> concat = std::shared_ptr<operators::ConcatOp<Dtype, float>> concat =
std::make_shared<operators::ConcatOp<Dtype, float>>( std::make_shared<operators::ConcatOp<Dtype, float>>(
...@@ -94,7 +92,7 @@ template <typename Dtype> class TestConcatOp { ...@@ -94,7 +92,7 @@ template <typename Dtype> class TestConcatOp {
return out_tensor; return out_tensor;
} }
private: private:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
std::map<framework::BlockDesc, std::map<framework::BlockDesc,
...@@ -106,8 +104,7 @@ template <typename Dtype> class TestConcatOp { ...@@ -106,8 +104,7 @@ template <typename Dtype> class TestConcatOp {
const Tensor &t4, int block_id) { const Tensor &t4, int block_id) {
std::shared_ptr<BlockDesc> to_predict_block = std::shared_ptr<BlockDesc> to_predict_block =
to_predict_program_->Block(block_id); to_predict_program_->Block(block_id);
for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); ++j) {
++j) {
auto op = ops_of_block_[*to_predict_block.get()][j]; auto op = ops_of_block_[*to_predict_block.get()][j];
DLOG << "op -> run()"; DLOG << "op -> run()";
op->Run(); op->Run();
...@@ -168,8 +165,7 @@ int main() { ...@@ -168,8 +165,7 @@ int main() {
/// output (4,100,2,2) /// output (4,100,2,2)
int input_index = int input_index =
input_n * stride0 + input_c * stride1 + input_h * stride2 + input_w; input_n * stride0 + input_c * stride1 + input_h * stride2 + input_w;
int output_index = int output_index = input_n * 100 * 2 * 2 +
input_n * 100 * 2 * 2 +
(input_c + inputx1.dims()[1] + inputx2.dims()[1]) * 2 * 2 + (input_c + inputx1.dims()[1] + inputx2.dims()[1]) * 2 * 2 +
input_h * 2 + input_w; input_h * 2 + input_w;
......
...@@ -24,7 +24,7 @@ namespace paddle_mobile { ...@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class TestElementwiseAddOp { template <typename Dtype> class TestElementwiseAddOp {
public: public:
explicit TestElementwiseAddOp(const Program<Dtype> p) : program_(p) { explicit TestElementwiseAddOp(const Program<Dtype> p) : program_(p) {
if (use_optimize_) { if (use_optimize_) {
to_predict_program_ = program_.optimizeProgram; to_predict_program_ = program_.optimizeProgram;
...@@ -43,8 +43,7 @@ template <typename Dtype> class TestElementwiseAddOp { ...@@ -43,8 +43,7 @@ template <typename Dtype> class TestElementwiseAddOp {
std::shared_ptr<OpDesc> op = ops[j]; std::shared_ptr<OpDesc> op = ops[j];
if (op->Type() == "elementwise_add" && if (op->Type() == "elementwise_add" &&
op->Input("X")[0] == "batch_norm_2.tmp_2") { op->Input("X")[0] == "batch_norm_2.tmp_2") {
DLOG << " elementwise_add attr size: " DLOG << " elementwise_add attr size: " << op->GetAttrMap().size();
<< op->GetAttrMap().size();
DLOG << " inputs size: " << op->GetInputs().size(); DLOG << " inputs size: " << op->GetInputs().size();
DLOG << " outputs size: " << op->GetOutputs().size(); DLOG << " outputs size: " << op->GetOutputs().size();
DLOG << " Input X is : " << op->Input("X")[0]; DLOG << " Input X is : " << op->Input("X")[0];
...@@ -54,9 +53,8 @@ template <typename Dtype> class TestElementwiseAddOp { ...@@ -54,9 +53,8 @@ template <typename Dtype> class TestElementwiseAddOp {
int axis = axis_attr.Get<int>(); int axis = axis_attr.Get<int>();
DLOG << " Attr axis is : " << axis; DLOG << " Attr axis is : " << axis;
std::shared_ptr<operators::ElementwiseAddOp<Dtype, float>> std::shared_ptr<operators::ElementwiseAddOp<Dtype, float>> add =
add = std::make_shared< std::make_shared<operators::ElementwiseAddOp<Dtype, float>>(
operators::ElementwiseAddOp<Dtype, float>>(
op->Type(), op->GetInputs(), op->GetOutputs(), op->Type(), op->GetInputs(), op->GetOutputs(),
op->GetAttrMap(), program_.scope); op->GetAttrMap(), program_.scope);
ops_of_block_[*block_desc.get()].push_back(add); ops_of_block_[*block_desc.get()].push_back(add);
...@@ -89,7 +87,7 @@ template <typename Dtype> class TestElementwiseAddOp { ...@@ -89,7 +87,7 @@ template <typename Dtype> class TestElementwiseAddOp {
return out_tensor; return out_tensor;
} }
private: private:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
std::map<framework::BlockDesc, std::map<framework::BlockDesc,
...@@ -100,8 +98,7 @@ template <typename Dtype> class TestElementwiseAddOp { ...@@ -100,8 +98,7 @@ template <typename Dtype> class TestElementwiseAddOp {
void predict_add(const Tensor &t1, const Tensor &t2, int block_id) { void predict_add(const Tensor &t1, const Tensor &t2, int block_id) {
std::shared_ptr<BlockDesc> to_predict_block = std::shared_ptr<BlockDesc> to_predict_block =
to_predict_program_->Block(block_id); to_predict_program_->Block(block_id);
for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); ++j) {
++j) {
auto op = ops_of_block_[*to_predict_block.get()][j]; auto op = ops_of_block_[*to_predict_block.get()][j];
DLOG << "op -> run()"; DLOG << "op -> run()";
op->Run(); op->Run();
......
...@@ -24,7 +24,7 @@ namespace paddle_mobile { ...@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class TestLrnOp { template <typename Dtype> class TestLrnOp {
public: public:
explicit TestLrnOp(const Program<Dtype> p) : program_(p) { explicit TestLrnOp(const Program<Dtype> p) : program_(p) {
if (use_optimize_) { if (use_optimize_) {
to_predict_program_ = program_.optimizeProgram; to_predict_program_ = program_.optimizeProgram;
...@@ -41,18 +41,15 @@ template <typename Dtype> class TestLrnOp { ...@@ -41,18 +41,15 @@ template <typename Dtype> class TestLrnOp {
// DLOG << " ops " << ops.size(); // DLOG << " ops " << ops.size();
for (int j = 0; j < ops.size(); ++j) { for (int j = 0; j < ops.size(); ++j) {
std::shared_ptr<OpDesc> op = ops[j]; std::shared_ptr<OpDesc> op = ops[j];
if (op->Type() == "lrn" && if (op->Type() == "lrn" && op->Input("X")[0] == "pool2d_0.tmp_0") {
op->Input("X")[0] == "pool2d_0.tmp_0") {
DLOG << " mul attr size: " << op->GetAttrMap().size(); DLOG << " mul attr size: " << op->GetAttrMap().size();
DLOG << " inputs size: " << op->GetInputs().size(); DLOG << " inputs size: " << op->GetInputs().size();
DLOG << " outputs size: " << op->GetOutputs().size(); DLOG << " outputs size: " << op->GetOutputs().size();
DLOG << " Input X is : " << op->Input("X")[0]; DLOG << " Input X is : " << op->Input("X")[0];
DLOG << " Output Out is : " << op->Output("Out")[0]; DLOG << " Output Out is : " << op->Output("Out")[0];
DLOG << " n : " << op->GetAttrMap().at("n").Get<int>(); DLOG << " n : " << op->GetAttrMap().at("n").Get<int>();
DLOG << " alpha : " DLOG << " alpha : " << op->GetAttrMap().at("alpha").Get<float>();
<< op->GetAttrMap().at("alpha").Get<float>(); DLOG << " beta : " << op->GetAttrMap().at("beta").Get<float>();
DLOG << " beta : "
<< op->GetAttrMap().at("beta").Get<float>();
DLOG << " k : " << op->GetAttrMap().at("k").Get<float>(); DLOG << " k : " << op->GetAttrMap().at("k").Get<float>();
std::shared_ptr<operators::LrnOp<Dtype, float>> lrn = std::shared_ptr<operators::LrnOp<Dtype, float>> lrn =
std::make_shared<operators::LrnOp<Dtype, float>>( std::make_shared<operators::LrnOp<Dtype, float>>(
...@@ -84,7 +81,7 @@ template <typename Dtype> class TestLrnOp { ...@@ -84,7 +81,7 @@ template <typename Dtype> class TestLrnOp {
return out_tensor; return out_tensor;
} }
private: private:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
std::map<framework::BlockDesc, std::map<framework::BlockDesc,
...@@ -95,8 +92,7 @@ template <typename Dtype> class TestLrnOp { ...@@ -95,8 +92,7 @@ template <typename Dtype> class TestLrnOp {
void predict_lrn(const Tensor &t1, int block_id) { void predict_lrn(const Tensor &t1, int block_id) {
std::shared_ptr<BlockDesc> to_predict_block = std::shared_ptr<BlockDesc> to_predict_block =
to_predict_program_->Block(block_id); to_predict_program_->Block(block_id);
for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); ++j) {
++j) {
auto op = ops_of_block_[*to_predict_block.get()][j]; auto op = ops_of_block_[*to_predict_block.get()][j];
DLOG << "op -> run()"; DLOG << "op -> run()";
op->Run(); op->Run();
...@@ -151,9 +147,8 @@ int main() { ...@@ -151,9 +147,8 @@ int main() {
} }
DLOGF("\n"); DLOGF("\n");
} }
DLOG << inputx1_ptr[0] << " / ((1 + 0.00002 * ( " << inputx1_ptr[0] DLOG << inputx1_ptr[0] << " / ((1 + 0.00002 * ( " << inputx1_ptr[0] << "^2 + "
<< "^2 + " << inputx1_ptr[4] << "^2 + " << inputx1_ptr[8] << inputx1_ptr[4] << "^2 + " << inputx1_ptr[8] << "^2 ))^0.75) = ";
<< "^2 ))^0.75) = ";
DLOG << output_lrn_ptr[0]; DLOG << output_lrn_ptr[0];
return 0; return 0;
} }
...@@ -24,7 +24,7 @@ namespace paddle_mobile { ...@@ -24,7 +24,7 @@ namespace paddle_mobile {
namespace framework { namespace framework {
template <typename Dtype> class TestMulOp { template <typename Dtype> class TestMulOp {
public: public:
explicit TestMulOp(const Program<Dtype> p) : program_(p) { explicit TestMulOp(const Program<Dtype> p) : program_(p) {
if (use_optimize_) { if (use_optimize_) {
to_predict_program_ = program_.optimizeProgram; to_predict_program_ = program_.optimizeProgram;
...@@ -41,8 +41,7 @@ template <typename Dtype> class TestMulOp { ...@@ -41,8 +41,7 @@ template <typename Dtype> class TestMulOp {
// DLOG << " ops " << ops.size(); // DLOG << " ops " << ops.size();
for (int j = 0; j < ops.size(); ++j) { for (int j = 0; j < ops.size(); ++j) {
std::shared_ptr<OpDesc> op = ops[j]; std::shared_ptr<OpDesc> op = ops[j];
if (op->Type() == "mul" && if (op->Type() == "mul" && op->Input("X")[0] == "pool2d_0.tmp_0") {
op->Input("X")[0] == "pool2d_0.tmp_0") {
DLOG << " mul attr size: " << op->GetAttrMap().size(); DLOG << " mul attr size: " << op->GetAttrMap().size();
DLOG << " inputs size: " << op->GetInputs().size(); DLOG << " inputs size: " << op->GetInputs().size();
DLOG << " outputs size: " << op->GetOutputs().size(); DLOG << " outputs size: " << op->GetOutputs().size();
...@@ -88,7 +87,7 @@ template <typename Dtype> class TestMulOp { ...@@ -88,7 +87,7 @@ template <typename Dtype> class TestMulOp {
return out_tensor; return out_tensor;
} }
private: private:
const framework::Program<Dtype> program_; const framework::Program<Dtype> program_;
std::shared_ptr<ProgramDesc> to_predict_program_; std::shared_ptr<ProgramDesc> to_predict_program_;
std::map<framework::BlockDesc, std::map<framework::BlockDesc,
...@@ -99,8 +98,7 @@ template <typename Dtype> class TestMulOp { ...@@ -99,8 +98,7 @@ template <typename Dtype> class TestMulOp {
void predict_mul(const Tensor &t1, const Tensor &t2, int block_id) { void predict_mul(const Tensor &t1, const Tensor &t2, int block_id) {
std::shared_ptr<BlockDesc> to_predict_block = std::shared_ptr<BlockDesc> to_predict_block =
to_predict_program_->Block(block_id); to_predict_program_->Block(block_id);
for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); for (int j = 0; j < ops_of_block_[*to_predict_block.get()].size(); ++j) {
++j) {
auto op = ops_of_block_[*to_predict_block.get()][j]; auto op = ops_of_block_[*to_predict_block.get()][j];
DLOG << "op -> run()"; DLOG << "op -> run()";
op->Run(); op->Run();
......
...@@ -30,7 +30,6 @@ void SetupTensor(paddle_mobile::framework::Tensor *input, ...@@ -30,7 +30,6 @@ void SetupTensor(paddle_mobile::framework::Tensor *input,
T *input_ptr = input->mutable_data<T>(dims); T *input_ptr = input->mutable_data<T>(dims);
for (int i = 0; i < input->numel(); ++i) { for (int i = 0; i < input->numel(); ++i) {
input_ptr[i] = input_ptr[i] = static_cast<T>(uniform_dist(rng) * (upper - lower) + lower);
static_cast<T>(uniform_dist(rng) * (upper - lower) + lower);
} }
} }
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册