Commit 75d72eef authored by 刘琦

Merge branch 'update_config_mode' into 'master'

update config mode

See merge request !40
; Legacy MACE model config (Python ConfigParser format).
; Optional [models] section: DIRECTORIES lists one or more model directories
; (comma-separated); when present, the [configs] keys below act as shared
; defaults for every listed model.
; [models]
; DIRECTORIES = tools/mobile_net/64/,tools/mobile_net/128/
[configs]
; If a [models] section exists, these keys are global defaults applied to all
; sub-models; otherwise this file configures a single model and must be placed
; in the same directory as its model.pb.
; TensorFlow graph node names used for conversion and validation
TF_INPUT_NODE=input
TF_OUTPUT_NODE=output
; tag used to name the generated model artifacts
MODEL_TAG=mobile_net
; comma-separated tensor shapes (presumably NHWC — confirm with converter docs)
INPUT_SHAPE=1,64,64,3
OUTPUT_SHAPE=1,64,64,2
; runtime device, e.g. gpu or dsp
RUNTIME=gpu
TARGET_ABI=armeabi-v7a
; 1 to limit OpenCL kernel execution time, 0 to disable
LIMIT_OPENCL_KERNEL_TIME=0
DSP_MODE=0
; 1 to enable benchmark mode
BENCHMARK_FLAG=0
; 1 to embed the model weight data into the generated code
EMBED_MODEL_DATA=1
#!/usr/bin/env bash #!/usr/bin/env bash
LIBMACE_TAG=`git describe --abbrev=0 --tags` LIBMACE_TAG=`git describe --abbrev=0 --tags`
VLOG_LEVEL=0
LIBMACE_SOURCE_DIR=`/bin/pwd` LIBMACE_SOURCE_DIR=`/bin/pwd`
INPUT_FILE_NAME="model_input" INPUT_FILE_NAME="model_input"
OUTPUT_FILE_NAME="model.out" OUTPUT_FILE_NAME="model.out"
OUTPUT_LIST_FILE="model.list" OUTPUT_LIST_FILE="model.list"
TF_MODEL_NAME="model.pb"
TF_MODEL_FILE_PATH=$TF_MODEL_FILE_DIR/$TF_MODEL_NAME
PHONE_DATA_DIR="/data/local/tmp/mace_run" PHONE_DATA_DIR="/data/local/tmp/mace_run"
KERNEL_DIR="${PHONE_DATA_DIR}/cl/" KERNEL_DIR="${PHONE_DATA_DIR}/cl/"
CODEGEN_DIR=${LIBMACE_SOURCE_DIR}/codegen CODEGEN_DIR=${LIBMACE_SOURCE_DIR}/codegen
......
# example.yaml — multi-model build configuration consumed by
# tools/mace_tools.py (passed via --config).
# Each YAML file describes one exported library (named
# [target_abi]/libmace-<project_name>.a) which may contain more than one model.
# NOTE(review): YAML nesting indentation appears to have been lost in this
# capture — each model name should be indented under `models:`, and each
# model's keys indented under its model name. Restore before use.
target_abis: [armeabi-v7a, arm64-v8a]
target_socs: [MSM8953] # target_socs not enabled yet
embed_model_data: 1
vlog_level: 0
models:
preview_net:
model_file_path: path/to/model64.pb # http:// and https:// URLs are also supported
input_node: input_node
output_node: output_node
input_shape: 1,64,64,3
output_shape: 1,64,64,2
runtime: gpu
limit_opencl_kernel_time: 0
dsp_mode: 0
capture_net:
model_file_path: path/to/model256.pb
input_node: input_node
output_node: output_node
input_shape: 1,256,256,3
output_shape: 1,256,256,2
runtime: gpu
limit_opencl_kernel_time: 1
dsp_mode: 0
...@@ -15,10 +15,10 @@ if [ "${BENCHMARK_FLAG}" = "1" ]; then ...@@ -15,10 +15,10 @@ if [ "${BENCHMARK_FLAG}" = "1" ]; then
OBFUSCATE=False OBFUSCATE=False
fi fi
bazel-bin/lib/python/tools/tf_converter --input=${TF_MODEL_FILE_PATH} \ bazel-bin/lib/python/tools/tf_converter --input=${MODEL_FILE_PATH} \
--output=${MODEL_CODEGEN_DIR}/model.cc \ --output=${MODEL_CODEGEN_DIR}/model.cc \
--input_node=${TF_INPUT_NODE} \ --input_node=${INPUT_NODE} \
--output_node=${TF_OUTPUT_NODE} \ --output_node=${OUTPUT_NODE} \
--data_type=${DATA_TYPE} \ --data_type=${DATA_TYPE} \
--runtime=${RUNTIME} \ --runtime=${RUNTIME} \
--output_type=source \ --output_type=source \
......
...@@ -2,20 +2,21 @@ ...@@ -2,20 +2,21 @@
# Must run at root dir of libmace project. # Must run at root dir of libmace project.
# python tools/mace_tools.py \ # python tools/mace_tools.py \
# --global_config=models/config \ # --config=tools/example.yaml \
# --round=100 \ # --round=100 \
# --mode=all # --mode=all
import argparse import argparse
import base64
import os import os
import shutil import shutil
import subprocess import subprocess
import sys import sys
import urllib
import yaml
from ConfigParser import ConfigParser from ConfigParser import ConfigParser
tf_model_file_dir_key = "TF_MODEL_FILE_DIR"
def run_command(command): def run_command(command):
print("Run command: {}".format(command)) print("Run command: {}".format(command))
...@@ -33,17 +34,13 @@ def run_command(command): ...@@ -33,17 +34,13 @@ def run_command(command):
result.returncode, command)) result.returncode, command))
def get_libs(configs): def get_libs(target_abi, configs):
global_target_abi = ""
global_runtime = ""
runtime_list = [] runtime_list = []
for config in configs: for model_name in configs["models"]:
if global_target_abi == "": model_runtime = configs["models"][model_name]["runtime"]
global_target_abi = config["TARGET_ABI"] runtime_list.append(model_runtime.lower())
elif global_target_abi != config["TARGET_ABI"]:
raise Exception("Multiple TARGET_ABI found in config files!")
runtime_list.append(config["RUNTIME"])
global_runtime = ""
if "dsp" in runtime_list: if "dsp" in runtime_list:
global_runtime = "dsp" global_runtime = "dsp"
elif "gpu" in runtime_list: elif "gpu" in runtime_list:
...@@ -53,7 +50,7 @@ def get_libs(configs): ...@@ -53,7 +50,7 @@ def get_libs(configs):
else: else:
raise Exception("Not found available RUNTIME in config files!") raise Exception("Not found available RUNTIME in config files!")
libmace_name = "libmace-{}-{}".format(global_target_abi, global_runtime) libmace_name = "libmace-{}-{}".format(target_abi, global_runtime)
command = "bash tools/download_and_link_lib.sh " + libmace_name command = "bash tools/download_and_link_lib.sh " + libmace_name
run_command(command) run_command(command)
...@@ -89,10 +86,12 @@ def tuning_run(model_output_dir, running_round, tuning, production_mode): ...@@ -89,10 +86,12 @@ def tuning_run(model_output_dir, running_round, tuning, production_mode):
model_output_dir, running_round, int(tuning), int(production_mode)) model_output_dir, running_round, int(tuning), int(production_mode))
run_command(command) run_command(command)
def benchmark_model(model_output_dir): def benchmark_model(model_output_dir):
command = "bash tools/benchmark.sh {}".format(model_output_dir) command = "bash tools/benchmark.sh {}".format(model_output_dir)
run_command(command) run_command(command)
def run_model(model_output_dir, running_round): def run_model(model_output_dir, running_round):
tuning_run(model_output_dir, running_round, False, False) tuning_run(model_output_dir, running_round, False, False)
...@@ -150,79 +149,10 @@ def merge_libs_and_tuning_results(output_dir, model_output_dirs): ...@@ -150,79 +149,10 @@ def merge_libs_and_tuning_results(output_dir, model_output_dirs):
run_command(command) run_command(command)
def parse_sub_model_configs(model_dirs, global_configs):
model_configs = []
for model_dir in model_dirs:
model_config = {}
model_config_path = os.path.join(model_dir, "config")
if os.path.exists(model_config_path):
cf = ConfigParser()
# Preserve character case
cf.optionxform = str
cf.read(model_config_path)
if "configs" in cf.sections():
config_list = cf.items("configs")
for config_map in config_list:
model_config[config_map[0]] = config_map[1]
else:
raise Exception("No config msg found in {}".format(model_config_path))
else:
raise Exception("Config file '{}' not found".format(model_config_path))
model_config[tf_model_file_dir_key] = model_dir
for config_map in global_configs:
model_config[config_map[0]] = config_map[1]
model_configs.append(model_config)
return model_configs
def parse_model_configs(): def parse_model_configs():
config_parser = ConfigParser() with open(FLAGS.config) as f:
# Preserve character case configs = yaml.load(f)
config_parser.optionxform = str return configs
global_config_dir = os.path.dirname(FLAGS.global_config)
try:
config_parser.read(FLAGS.global_config)
config_sections = config_parser.sections()
model_dirs = []
model_output_map = {}
if ("models" in config_sections) and (config_parser.items("models")):
model_dirs_str = config_parser.get(
"models", "DIRECTORIES")
model_dirs_str = model_dirs_str.rstrip(
",")
# Remove repetition element
model_dirs = list(
set(model_dirs_str.split(",")))
for model_dir in model_dirs:
# Create output dirs
model_output_dir = FLAGS.output_dir + "/" + model_dir
model_output_map[model_dir] = model_output_dir
else:
model_dirs = [global_config_dir]
# Create output dirs
model_output_dir = FLAGS.output_dir + "/" + global_config_dir
model_output_map[global_config_dir] = model_output_dir
except Exception as e:
print("Error in read model path msg. Exception: {}".format(e))
return
global_configs = []
if "configs" in config_sections:
global_configs = config_parser.items("configs")
return parse_sub_model_configs(model_dirs, global_configs), model_output_map
def parse_args(): def parse_args():
...@@ -230,7 +160,7 @@ def parse_args(): ...@@ -230,7 +160,7 @@ def parse_args():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true") parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument( parser.add_argument(
"--global_config", "--config",
type=str, type=str,
default="./tool/config", default="./tool/config",
help="The global config file of models.") help="The global config file of models.")
...@@ -246,7 +176,7 @@ def parse_args(): ...@@ -246,7 +176,7 @@ def parse_args():
def main(unused_args): def main(unused_args):
configs, model_output_map = parse_model_configs() configs = parse_model_configs()
if FLAGS.mode == "build" or FLAGS.mode == "all": if FLAGS.mode == "build" or FLAGS.mode == "all":
# Remove previous output dirs # Remove previous output dirs
...@@ -258,42 +188,63 @@ def main(unused_args): ...@@ -258,42 +188,63 @@ def main(unused_args):
if FLAGS.mode == "validate": if FLAGS.mode == "validate":
FLAGS.round = 1 FLAGS.round = 1
libmace_name = get_libs(configs) # target_abi = configs["target_abi"]
# libmace_name = get_libs(target_abi, configs)
# Transfer params by environment
# os.environ["TARGET_ABI"] = target_abi
os.environ["EMBED_MODEL_DATA"] = str(configs["embed_model_data"])
os.environ["VLOG_LEVEL"] = str(configs["vlog_level"])
os.environ["PROJECT_NAME"] = os.path.splitext(FLAGS.config)[0]
model_output_dirs = [] for target_abi in configs["target_abis"]:
for config in configs: libmace_name = get_libs(target_abi, configs)
# Transfer params by environment # Transfer params by environment
for key in config: os.environ["TARGET_ABI"] = target_abi
os.environ[key] = config[key] model_output_dirs = []
model_output_dir = model_output_map[config[tf_model_file_dir_key]] for model_name in configs["models"]:
model_output_dirs.append(model_output_dir) # Transfer params by environment
os.environ["MODEL_TAG"] = model_name
model_config = configs["models"][model_name]
for key in model_config:
os.environ[key.upper()] = str(model_config[key])
model_output_dir = FLAGS.output_dir + "/" + target_abi + "/" + model_name + "/" + base64.b16encode(
model_config["model_file_path"])
model_output_dirs.append(model_output_dir)
if FLAGS.mode == "build" or FLAGS.mode == "all":
if os.path.exists(model_output_dir):
shutil.rmtree(model_output_dir)
os.makedirs(model_output_dir)
clear_env()
if FLAGS.mode == "build" or FLAGS.mode == "all": # Support http:// and https://
if os.path.exists(model_output_dir): if model_config["model_file_path"].startswith(
shutil.rmtree(model_output_dir) "http://") or model_config["model_file_path"].startswith("https://"):
os.makedirs(model_output_dir) os.environ["MODEL_FILE_PATH"] = model_output_dir + "/model.pb"
clear_env() urllib.urlretrieve(model_config["model_file_path"], os.environ["MODEL_FILE_PATH"])
if FLAGS.mode == "build" or FLAGS.mode == "run" or FLAGS.mode == "validate" or FLAGS.mode == "all": if FLAGS.mode == "build" or FLAGS.mode == "run" or FLAGS.mode == "validate" or FLAGS.mode == "all":
generate_random_input(model_output_dir) generate_random_input(model_output_dir)
if FLAGS.mode == "build" or FLAGS.mode == "all": if FLAGS.mode == "build" or FLAGS.mode == "all":
generate_model_code() generate_model_code()
build_mace_run_prod(model_output_dir, FLAGS.tuning, libmace_name) build_mace_run_prod(model_output_dir, FLAGS.tuning, libmace_name)
if FLAGS.mode == "run" or FLAGS.mode == "validate" or FLAGS.mode == "all": if FLAGS.mode == "run" or FLAGS.mode == "validate" or FLAGS.mode == "all":
run_model(model_output_dir, FLAGS.round) run_model(model_output_dir, FLAGS.round)
if FLAGS.mode == "benchmark": if FLAGS.mode == "benchmark":
benchmark_model(model_output_dir) benchmark_model(model_output_dir)
if FLAGS.mode == "validate" or FLAGS.mode == "all": if FLAGS.mode == "validate" or FLAGS.mode == "all":
validate_model(model_output_dir) validate_model(model_output_dir)
if FLAGS.mode == "build" or FLAGS.mode == "merge" or FLAGS.mode == "all": if FLAGS.mode == "build" or FLAGS.mode == "merge" or FLAGS.mode == "all":
merge_libs_and_tuning_results(FLAGS.output_dir, model_output_dirs) merge_libs_and_tuning_results(FLAGS.output_dir + "/" + target_abi,
model_output_dirs)
if __name__ == '__main__': if __name__ == "__main__":
FLAGS, unparsed = parse_args() FLAGS, unparsed = parse_args()
main(unused_args=[sys.argv[0]] + unparsed) main(unused_args=[sys.argv[0]] + unparsed)
...@@ -24,25 +24,25 @@ cp ${LIBMACE_SOURCE_DIR}/lib/hexagon/libhexagon_controller.so ${LIBMACE_BUILD_DI ...@@ -24,25 +24,25 @@ cp ${LIBMACE_SOURCE_DIR}/lib/hexagon/libhexagon_controller.so ${LIBMACE_BUILD_DI
LIBMACE_TEMP_DIR=`mktemp -d -t libmace.XXXX` LIBMACE_TEMP_DIR=`mktemp -d -t libmace.XXXX`
# Merge all libraries in to one # Merge all libraries in to one
echo "create ${LIBMACE_BUILD_DIR}/libmace/lib/libmace_with_models.a" > ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "create ${LIBMACE_BUILD_DIR}/libmace/lib/libmace_${PROJECT_NAME}.a" > ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
echo "addlib lib/mace/libmace.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib lib/mace/libmace.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
echo "addlib lib/mace/libmace_prod.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib lib/mace/libmace_prod.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
if [ x"TARGET_ABI" = x"host" ]; then if [ x"TARGET_ABI" = x"host" ]; then
echo "addlib bazel-bin/codegen/libgenerated_opencl_prod.pic.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib bazel-bin/codegen/libgenerated_opencl_prod.pic.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
echo "addlib bazel-bin/codegen/libgenerated_tuning_params.pic.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib bazel-bin/codegen/libgenerated_tuning_params.pic.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
else else
echo "addlib bazel-bin/codegen/libgenerated_opencl_prod.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib bazel-bin/codegen/libgenerated_opencl_prod.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
echo "addlib bazel-bin/codegen/libgenerated_tuning_params.a" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib bazel-bin/codegen/libgenerated_tuning_params.a" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
fi fi
for model_output_dir in ${MODEL_OUTPUT_DIRS_ARR[@]}; do for model_output_dir in ${MODEL_OUTPUT_DIRS_ARR[@]}; do
for lib in ${model_output_dir}/*.a; do for lib in ${model_output_dir}/*.a; do
echo "addlib ${lib}" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "addlib ${lib}" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
done done
done done
echo "save" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "save" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
echo "end" >> ${LIBMACE_TEMP_DIR}/libmace_with_models.mri echo "end" >> ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri
$ANDROID_NDK_HOME/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-ar \ $ANDROID_NDK_HOME/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-ar \
-M < ${LIBMACE_TEMP_DIR}/libmace_with_models.mri || exit 1 -M < ${LIBMACE_TEMP_DIR}/libmace_${PROJECT_NAME}.mri || exit 1
rm -rf ${LIBMACE_TEMP_DIR} rm -rf ${LIBMACE_TEMP_DIR}
......
...@@ -23,12 +23,12 @@ if [ "$GENERATE_DATA_OR_NOT" = 1 ]; then ...@@ -23,12 +23,12 @@ if [ "$GENERATE_DATA_OR_NOT" = 1 ]; then
else else
rm -rf ${MODEL_OUTPUT_DIR}/${OUTPUT_FILE_NAME} rm -rf ${MODEL_OUTPUT_DIR}/${OUTPUT_FILE_NAME}
adb </dev/null pull ${PHONE_DATA_DIR}/${OUTPUT_FILE_NAME} ${MODEL_OUTPUT_DIR} adb </dev/null pull ${PHONE_DATA_DIR}/${OUTPUT_FILE_NAME} ${MODEL_OUTPUT_DIR}
python tools/validate.py --model_file ${TF_MODEL_FILE_PATH} \ python tools/validate.py --model_file ${MODEL_FILE_PATH} \
--input_file ${MODEL_OUTPUT_DIR}/${INPUT_FILE_NAME} \ --input_file ${MODEL_OUTPUT_DIR}/${INPUT_FILE_NAME} \
--mace_out_file ${MODEL_OUTPUT_DIR}/${OUTPUT_FILE_NAME} \ --mace_out_file ${MODEL_OUTPUT_DIR}/${OUTPUT_FILE_NAME} \
--mace_runtime ${RUNTIME} \ --mace_runtime ${RUNTIME} \
--input_node ${TF_INPUT_NODE} \ --input_node ${INPUT_NODE} \
--output_node ${TF_OUTPUT_NODE} \ --output_node ${OUTPUT_NODE} \
--input_shape ${INPUT_SHAPE} \ --input_shape ${INPUT_SHAPE} \
--output_shape ${OUTPUT_SHAPE} --output_shape ${OUTPUT_SHAPE}
fi fi
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册