Commit c37d017a authored by luxuhui

fix bug in converter and dana

N/A
Signed-off-by: Luxuhui <luxuhui@xiaomi.com>
Parent 7d927324
@@ -195,10 +195,11 @@ def main(unused_args):
                 address_sanitizer=FLAGS.address_sanitizer,
                 simpleperf=FLAGS.simpleperf)
             globals()[FLAGS.stdout_processor](stdouts, dev, target_abi)
-            report_run_statistics(stdouts=stdouts,
-                                  device=dev['device_name'],
-                                  soc=dev['target_socs'],
-                                  abi=target_abi, dana_util=dana_util)
+            if dana_util.service_available():
+                report_run_statistics(stdouts=stdouts,
+                                      device=dev['device_name'],
+                                      soc=dev['target_socs'],
+                                      abi=target_abi, dana_util=dana_util)
 
 
 if __name__ == "__main__":
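Reviewer note on the first hunk: it stops the benchmark script from calling report_run_statistics unconditionally and only reports when the Dana dashboard service is reachable. Below is a minimal, self-contained sketch of that guarded-reporting pattern, assuming a hypothetical DanaStub stand-in for the real DanaUtil; only the service_available() check and the report_run_statistics keyword arguments come from the diff itself.

# Hedged sketch, not the MACE implementation: skip dashboard reporting when
# the Dana service is unreachable instead of failing the whole benchmark run.
class DanaStub(object):                        # hypothetical stand-in for DanaUtil
    def __init__(self, base_url=None):
        self.base_url = base_url

    def service_available(self):
        # The real DanaUtil presumably checks that the Dana service responds;
        # here we only check whether a dashboard URL was configured at all.
        return self.base_url is not None

    def report_benchmark(self, payload):
        print("POST %s %s" % (self.base_url, payload))


def report_run_statistics(stdouts, device, soc, abi, dana_util):
    dana_util.report_benchmark({"device": device, "soc": soc, "abi": abi,
                                "lines": len(stdouts.splitlines())})


dana_util = DanaStub(base_url=None)            # no dashboard configured
if dana_util.service_available():              # the guard added by this commit
    report_run_statistics("benchmark ok\n", "pixel_3", "sdm845",
                          "arm64-v8a", dana_util)
else:
    print("Dana service unavailable, skipping statistics upload")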
@@ -2134,12 +2134,12 @@ class Transformer(base_converter.ConverterInterface):
                 continue
             shape_idx = kOpTypeInputIdxMap[op.type]
             dim_arg = ConverterUtil.get_arg(op, MaceKeyword.mace_dim_str)
-            if len(op.input) > shape_idx and dim_arg is None:
+            if len(op.input) > shape_idx and dim_arg is None and \
+                    op.input[shape_idx] in self._consts:
                 shape_tensor = self._consts[op.input[shape_idx]]
-                if shape_tensor is not None:
-                    dim_arg = op.arg.add()
-                    dim_arg.name = MaceKeyword.mace_dim_str
-                    dim_arg.ints.extend(shape_tensor.int32_data)
+                dim_arg = op.arg.add()
+                dim_arg.name = MaceKeyword.mace_dim_str
+                dim_arg.ints.extend(shape_tensor.int32_data)
 
     def fold_fc_reshape(self):
         net = self._model
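Reviewer note on the second hunk: the old converter code looked up self._consts[op.input[shape_idx]] for every matching op, which raises a KeyError whenever the shape input is produced by another op instead of being a constant tensor, and the later `is not None` check could not catch that. The fix moves a membership test into the `if` condition and drops the redundant check. The sketch below illustrates the corrected guard under assumed names (consts, op_inputs, add_dim_arg); it is not the Transformer API.

# Hedged sketch of the corrected guard; names are illustrative stand-ins for
# Transformer._consts and MaceKeyword.mace_dim_str.
consts = {"reshape_shape": [1, -1, 64]}        # constant tensors known by name

def add_dim_arg(op_inputs, shape_idx, args):
    # Old behaviour: consts[op_inputs[shape_idx]] raised KeyError when the
    # shape came from another op; checking the result against None was too late.
    if len(op_inputs) > shape_idx and op_inputs[shape_idx] in consts:
        args["dim"] = list(consts[op_inputs[shape_idx]])
    return args

print(add_dim_arg(["data", "reshape_shape"], 1, {}))   # {'dim': [1, -1, 64]}
print(add_dim_arg(["data", "runtime_shape"], 1, {}))   # {} -- skipped, no crash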