#!/bin/bash
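# Usage: bash prepare.sh <config_file> <mode>
#   <config_file>: config file to be parsed ($1)
#   <mode>:        one of the MODE values listed below ($2)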
FILENAME=$1

# MODE must be one of ['lite_train_lite_infer', 'lite_train_whole_infer', 'whole_train_whole_infer',
#                      'whole_infer', 'klquant_whole_infer', 'cpp_infer', 'serving_infer',
#                      'paddle2onnx_infer', 'lite_infer']

MODE=$2

dataline=$(cat ${FILENAME})
# parse params
IFS=$'\n'
lines=(${dataline})

# func_parser_key: print the key part (before the first ':') of a "key:value" config line
function func_parser_key(){
    strs=$1
    IFS=":"
    array=(${strs})
    tmp=${array[0]}
    echo ${tmp}
}

# func_parser_value: print the value part of a "key:value" config line;
# values that themselves contain a ':' (e.g. URLs) are re-joined after splitting
function func_parser_value(){
    strs=$1
    IFS=":"
    array=(${strs})
    if [ ${#array[*]} = 2 ]; then
        echo ${array[1]}
    else
        IFS="|"
        tmp="${array[1]}:${array[2]}"
        echo ${tmp}
    fi
}

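# func_get_url_file_name: print the file name (last path segment) of a URL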
function func_get_url_file_name(){
    strs=$1
    IFS="/"
    array=(${strs})
    tmp=${array[${#array[@]}-1]}
    echo ${tmp}
}

model_name=$(func_parser_value "${lines[1]}")

# cpp_infer: download inference models and test data for C++ inference
if [ ${MODE} = "cpp_infer" ];then
    if [[ $FILENAME == *infer_cpp_linux_gpu_cpu.txt ]];then
        cpp_type=$(func_parser_value "${lines[2]}")
        cls_inference_model_dir=$(func_parser_value "${lines[3]}")
        det_inference_model_dir=$(func_parser_value "${lines[4]}")
        cls_inference_url=$(func_parser_value "${lines[5]}")
        det_inference_url=$(func_parser_value "${lines[6]}")

        if [[ $cpp_type == "cls" ]];then
            # classification: download the inference model and the ImageNet test subset
            eval "wget -nc $cls_inference_url"
            tar xf "${model_name}_inference.tar"
            eval "mv inference $cls_inference_model_dir"
            cd dataset
            rm -rf ILSVRC2012
            wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/data/whole_chain/whole_chain_infer.tar
            tar xf whole_chain_infer.tar
            ln -s whole_chain_infer ILSVRC2012
            cd ..
        elif [[ $cpp_type == "shitu" ]];then
            # PP-ShiTu: download recognition and detection inference models plus the drink dataset
            eval "wget -nc $cls_inference_url"
            tar_name=$(func_get_url_file_name "$cls_inference_url")
            model_dir=${tar_name%.*}
            eval "tar xf ${tar_name}"
            eval "mv ${model_dir} ${cls_inference_model_dir}"

            eval "wget -nc $det_inference_url"
            tar_name=$(func_get_url_file_name "$det_inference_url")
            model_dir=${tar_name%.*}
            eval "tar xf ${tar_name}"
            eval "mv ${model_dir} ${det_inference_model_dir}"
            cd dataset
            wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/data/drink_dataset_v1.0.tar
            tar -xf drink_dataset_v1.0.tar
        else
            echo "Wrong cpp_type in config file line 3: only 'cls' and 'shitu' are supported."
        fi
        exit 0
    else
        echo "Wrong config file: cpp_infer mode requires a *infer_cpp_linux_gpu_cpu.txt config."
        exit 1
    fi
fi

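# parse the model name and the download URL of the pretrained / inference model from the config file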
model_name=$(func_parser_value "${lines[1]}")
model_url_value=$(func_parser_value "${lines[35]}")
model_url_key=$(func_parser_key "${lines[35]}")

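# GeneralRecognition configs: prepare the PP-ShiTu demo data and pretrained model, then exit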
if [[ $FILENAME == *GeneralRecognition* ]];then
   cd dataset
   rm -rf Aliproduct
   rm -rf train_reg_all_data.txt
   rm -rf demo_train
   wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/data/whole_chain/tipc_shitu_demo_data.tar
   tar -xf tipc_shitu_demo_data.tar
   ln -s tipc_shitu_demo_data Aliproduct
   ln -s tipc_shitu_demo_data/demo_train.txt train_reg_all_data.txt
   ln -s tipc_shitu_demo_data/demo_train demo_train
   cd tipc_shitu_demo_data
   ln -s demo_test.txt val_list.txt
   cd ../../
   eval "wget -nc $model_url_value"
   mv general_PPLCNet_x2_5_pretrained_v1.0.pdparams GeneralRecognition_PPLCNet_x2_5_pretrained.pdparams
   exit 0
fi

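# prepare training / inference data according to MODE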
if [ ${MODE} = "lite_train_lite_infer" ] || [ ${MODE} = "lite_train_whole_infer" ];then
    # prepare lite train data
    cd dataset
    rm -rf ILSVRC2012
    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/data/whole_chain/whole_chain_little_train.tar
    tar xf whole_chain_little_train.tar
    ln -s whole_chain_little_train ILSVRC2012
    cd ILSVRC2012 
    mv train.txt train_list.txt
    mv val.txt val_list.txt
    cp -r train/* val/
    cd ../../
elif [ ${MODE} = "whole_infer" ] || [ ${MODE} = "klquant_whole_infer" ];then
    # download data
    cd dataset
    rm -rf ILSVRC2012
    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/data/whole_chain/whole_chain_infer.tar
    tar xf whole_chain_infer.tar
    ln -s whole_chain_infer ILSVRC2012
    cd ILSVRC2012 
    mv val.txt val_list.txt
    ln -s val_list.txt train_list.txt
    cd ../../
    # download inference or pretrained model
    eval "wget -nc $model_url_value"
    if [[ $model_url_key == *inference* ]]; then
        rm -rf inference
        tar xf "${model_name}_inference.tar"
    fi
    if [[ $model_name == "SwinTransformer_large_patch4_window7_224" || $model_name == "SwinTransformer_large_patch4_window12_384" ]];then
        cmd="mv ${model_name}_22kto1k_pretrained.pdparams ${model_name}_pretrained.pdparams"
        eval $cmd
    fi

elif [ ${MODE} = "whole_train_whole_infer" ];then
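    # whole-chain training uses the whole_chain_CIFAR100 subset in place of ILSVRC2012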
    cd dataset
    rm -rf ILSVRC2012
    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/data/whole_chain/whole_chain_CIFAR100.tar
    tar xf whole_chain_CIFAR100.tar
    ln -s whole_chain_CIFAR100 ILSVRC2012
    cd ILSVRC2012 
    mv train.txt train_list.txt
    mv test.txt val_list.txt
    cd ../../
fi

if [ ${MODE} = "serving_infer" ];then
    # prepare serving env
    python_name=$(func_parser_value "${lines[2]}")
    ${python_name} -m pip install paddle-serving-server-gpu==0.6.1.post101
    ${python_name} -m pip install paddle_serving_client==0.6.1
    ${python_name} -m pip install paddle-serving-app==0.6.1
    unset http_proxy
    unset https_proxy
    cd ./deploy/paddleserving
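    # download a ResNet50_vd inference model for the serving test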
    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar && tar xf ResNet50_vd_infer.tar
fi

if [ ${MODE} = "paddle2onnx_infer" ];then
    # prepare paddle2onnx env
    python_name=$(func_parser_value "${lines[2]}")
    ${python_name} -m pip install paddle2onnx
    ${python_name} -m pip install onnxruntime

    # wget model
    cd deploy && mkdir models && cd models
    wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar  && tar xf ResNet50_vd_infer.tar
    cd ../../
fi