From 28f6506d694a1e3834d14c2b8afabe22292e92fa Mon Sep 17 00:00:00 2001
From: huzhiqiang <912790387@qq.com>
Date: Sat, 22 Feb 2020 13:15:18 +0800
Subject: [PATCH] [doc and opt tool] add auto_transform.sh into Paddle-Lite
 project and modify related docs #2973 (#2974)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/user_guides/model_optimize_tool.md |   8 +-
 lite/tools/auto_transform.sh            | 166 ++++++++++++++++++++++++
 2 files changed, 170 insertions(+), 4 deletions(-)
 create mode 100644 lite/tools/auto_transform.sh

diff --git a/docs/user_guides/model_optimize_tool.md b/docs/user_guides/model_optimize_tool.md
index 340e4fe397..31288d85c7 100644
--- a/docs/user_guides/model_optimize_tool.md
+++ b/docs/user_guides/model_optimize_tool.md
@@ -130,14 +130,14 @@ opt can collect and print the operator information in a model and check whether Paddle-Lite supports
 **Background**: To run a model from a third-party framework (tensorflow, caffe, onnx) with Paddle-Lite, two conversions are normally required: first use the x2paddle tool to convert the third-party model into PaddlePaddle format, then use opt to convert the PaddlePaddle model into a format supported by Paddle-Lite. To simplify this process, we provide a one-step script that merges the x2paddle conversion and the opt conversion:
 
-**One-step conversion script**: [auto_transform.sh](https://paddlelite-data.bj.bcebos.com/model_optimize_tool/auto_transform.sh)
+**One-step conversion script**: [auto_transform.sh](https://github.com/PaddlePaddle/Paddle-Lite/blob/release/v2.3/lite/tools/auto_transform.sh)
 
-**Environment requirements**: Before converting a third-party model with the `auto_transform.sh` script, install the x2paddle environment first; see the [x2paddle installation guide](https://github.com/PaddlePaddle/X2Paddle#环境依赖) to install x2paddle and its dependencies.
+**Environment requirements**: Before converting a third-party model with the `auto_transform.sh` script, install the x2paddle environment first; see the [x2paddle installation guide](https://github.com/PaddlePaddle/X2Paddle#环境依赖) to install x2paddle and the dependencies of x2paddle (tensorflow, caffe, etc.).
 
 **Usage**:
 
-(1) Print the help information: ` ./auto_transform.sh`
+(1) Print the help information: ` sh ./auto_transform.sh`
 
 (2) Convert a model
 
@@ -147,7 +147,7 @@ USAGE:
     transform model from tensorflow/caffe/onnx form into paddle-lite naive-buffer form.
 ----------------------------------------
 example:
-    ./auto_transform.sh --framework=tensorflow --model=tf_model.pb --optimize_out=opt_model_result
+    sh ./auto_transform.sh --framework=tensorflow --model=tf_model.pb --optimize_out=opt_model_result
 ----------------------------------------
 Arguments about x2paddle:
     --framework=(tensorflow|caffe|onnx);
diff --git a/lite/tools/auto_transform.sh b/lite/tools/auto_transform.sh
new file mode 100644
index 0000000000..db37e13dfa
--- /dev/null
+++ b/lite/tools/auto_transform.sh
@@ -0,0 +1,166 @@
+#!/usr/bin/env bash
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+#set -u  # Check for undefined variables
+
+# Global variables
+###########################################
+# (1) x2paddle variables
+framework="caffe"  # framework=(caffe|tensorflow|onnx)
+prototxt=""
+weight=""
+model=""
+# fluid_save_dir: the path of x2paddle's output; this is used as the `model_dir` input of opt
+fluid_save_dir="saved_fluid"
+###########################################
+# (2) opt variables
+valid_targets="arm"  # valid_targets=(arm|opencl|x86|npu|xpu)
+optimize_out="lite_opt_dir"
+
+
+# check the current system
+system=$(uname -s)
+opt=""
+if [ "${system}" == "Darwin" ]; then
+    opt=opt_mac
+else
+    opt=opt
+fi
+
+function check_x2paddle {
+    message=$(which x2paddle)
+    if [ -z "$message" ]; then
+        echo "please install the x2paddle environment first; you can install it according to https://github.com/PaddlePaddle/X2Paddle#%E7%8E%AF%E5%A2%83%E4%BE%9D%E8%B5%96"
+        exit 1
+    fi
+}
+function check_model_optimize_tool {
+    if [ ! -f "$opt" ]; then
+        wget https://paddlelite-data.bj.bcebos.com/model_optimize_tool/$opt
+        chmod +x $opt
+    fi
+}
+function x2paddle_transform {
+    check_x2paddle
+    if [ "$framework" == "caffe" ]; then
+        x2paddle --framework=caffe \
+                 --prototxt=$prototxt \
+                 --weight=$weight \
+                 --save_dir=$fluid_save_dir
+    elif [ "$framework" == "tensorflow" ]; then
+        x2paddle --framework=tensorflow \
+                 --model=$model \
+                 --save_dir=$fluid_save_dir
+    elif [ "$framework" == "onnx" ]; then
+        x2paddle --framework=onnx \
+                 --model=$model \
+                 --save_dir=$fluid_save_dir
+    else
+        echo "error: unsupported framework; x2paddle supports three frameworks: caffe, tensorflow and onnx."
+        exit 1
+    fi
+}
+
+function model_optimize_tool_transform {
+    check_model_optimize_tool
+    ./$opt \
+        --model_dir=$fluid_save_dir/inference_model \
+        --optimize_out_type=naive_buffer \
+        --optimize_out=$optimize_out \
+        --valid_targets=$valid_targets
+}
+
+function print_usage {
+    set +x
+    echo -e "\nUSAGE:"
+    echo "    auto_transform.sh combines the functions of x2paddle and opt: it can"
+    echo "    transform model from tensorflow/caffe/onnx form into paddle-lite naive-buffer form."
+    echo "----------------------------------------"
+    echo "example:"
+    echo "    sh ./auto_transform.sh --framework=tensorflow --model=tf_model.pb --optimize_out=opt_model_result"
+    echo "----------------------------------------"
+    echo "Arguments about x2paddle:"
+    echo "    --framework=(tensorflow|caffe|onnx);"
+    echo "    --model='model file for tensorflow or onnx';"
+    echo "    --prototxt='proto file for caffe' --weight='weight file for caffe'"
+
+    echo "For TensorFlow:"
+    echo "   --framework=tensorflow --model=tf_model.pb"
+    echo
+    echo "For Caffe:"
+    echo "   --framework=caffe --prototxt=deploy.prototxt --weight=deploy.caffemodel"
+    echo
+    echo "For ONNX:"
+    echo "   --framework=onnx --model=onnx_model.onnx"
+    echo
+    echo "Arguments about opt:"
+    echo "    --valid_targets=(arm|opencl|x86|npu|xpu); valid targets on Paddle-Lite."
+    echo "    --fluid_save_dir='path of the fluid model output by x2paddle'"
+    echo "    --optimize_out='path of the output Paddle-Lite model'"
+    echo "----------------------------------------"
+    echo
+}
+
+function main {
+    # Parse command line.
+    if [ $# -eq 0 ]; then
+        print_usage
+        exit 1
+    fi
+    for i in "$@"; do
+        case $i in
+            --framework=*)
+                framework="${i#*=}"
+                shift
+                ;;
+            --prototxt=*)
+                prototxt="${i#*=}"
+                shift
+                ;;
+            --weight=*)
+                weight="${i#*=}"
+                shift
+                ;;
+            --model=*)
+                model="${i#*=}"
+                shift
+                ;;
+            --fluid_save_dir=*)
+                fluid_save_dir="${i#*=}"
+                shift
+                ;;
+            --valid_targets=*)
+                valid_targets="${i#*=}"
+                shift
+                ;;
+            --optimize_out=*)
+                optimize_out="${i#*=}"
+                shift
+                ;;
+            *)
+                # unknown option
+                print_usage
+                exit 1
+                ;;
+        esac
+    done
+    x2paddle_transform
+    model_optimize_tool_transform
+}
+
+main "$@"
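
Note for reviewers: the two-step pipeline that `auto_transform.sh` wraps can also be run by hand, which is what the doc change above describes. The sketch below is a minimal manual example for a TensorFlow model, reusing the same x2paddle and opt flags that appear in the script; the model file name and output directories (`tf_model.pb`, `saved_fluid`, `lite_opt_dir`) are placeholders rather than files shipped with this patch, and the opt binary is assumed to sit in the current directory.

```bash
#!/usr/bin/env bash
# Minimal sketch of the two-step conversion that auto_transform.sh automates.
# Assumes x2paddle is installed and an `opt` binary exists in the current
# directory; tf_model.pb and the output paths are placeholder names.
set -e

# Step 1: convert the third-party (here: TensorFlow) model into PaddlePaddle (fluid) format.
x2paddle --framework=tensorflow \
         --model=tf_model.pb \
         --save_dir=saved_fluid

# Step 2: convert the PaddlePaddle model into Paddle-Lite naive-buffer format with opt.
./opt --model_dir=saved_fluid/inference_model \
      --optimize_out_type=naive_buffer \
      --optimize_out=lite_opt_dir \
      --valid_targets=arm
```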
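The option loop in `main` relies on the `${i#*=}` parameter expansion to split `--key=value` arguments: the expansion strips everything up to and including the first `=`, leaving only the value. Below is a tiny standalone sketch of that idiom, separate from the patch; the option name and the `parse_demo.sh` file name are arbitrary examples, not part of the Paddle-Lite tooling.

```bash
#!/usr/bin/env bash
# Demonstrates the --key=value parsing idiom used in main() of auto_transform.sh.
framework=""
for i in "$@"; do
    case $i in
        --framework=*)
            # ${i#*=} removes the shortest prefix ending in '=',
            # so --framework=caffe yields "caffe".
            framework="${i#*=}"
            ;;
        *)
            echo "unknown option: $i" >&2
            ;;
    esac
done
echo "framework=${framework}"

# Example run:  sh ./parse_demo.sh --framework=caffe   ->   framework=caffe
```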