Commit 15925d14 authored by: 刘琦

Merge branch 'quantize' into 'master'

Fix post train quantize

See merge request !1059
......@@ -167,7 +167,7 @@ quantization_tests:
fi
- if [ -z "$TARGET_SOCS" ]; then TARGET_SOCS=random; fi
- >
for CONF_FILE in mace-models/mobilenet-v1/mobilenet-v1-quantize-retrain.yml mace-models/mobilenet-v1/mobilenet-v1-quantize-retrain-for-check-only.yml mace-models/mobilenet-v1/mobilenet-v1-quantize-retrain-dsp.yml;
for CONF_FILE in mace-models/mobilenet-v1/mobilenet-v1-quantize-friendly.yml mace-models/mobilenet-v1/mobilenet-v1-quantize-retrain-for-check-only.yml mace-models/mobilenet-v1/mobilenet-v1-quantize-retrain-dsp.yml;
do
python tools/converter.py convert --config=${CONF_FILE} --target_socs=$TARGET_SOCS --model_graph_format=file --model_data_format=file || exit 1;
python tools/converter.py run --config=${CONF_FILE} --target_socs=$TARGET_SOCS --device_yml=${DEVICE_CONF_FILE} --round=1 --validate --model_graph_format=file --model_data_format=file || exit 1;
......
......@@ -1713,8 +1713,8 @@ class Transformer(base_converter.ConverterInterface):
mace_check(output in self._quantize_activation_info,
"%s does not have quantize activation info"
% op)
op.quantize_info.append(
self._quantize_activation_info[output])
op.quantize_info.extend([
self._quantize_activation_info[output]])
if not self._option.quantize:
return False
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register