diff --git a/tools/python/transform/transformer.py b/tools/python/transform/transformer.py
index 9b26d487cb3055864f36f1af94cdc02736d1a40d..a10b8701c36d34d72dbf4776d4087dec0a5ca6e1 100644
--- a/tools/python/transform/transformer.py
+++ b/tools/python/transform/transformer.py
@@ -963,7 +963,7 @@ class Transformer(base_converter.ConverterInterface):
                 if consumer_op.type == MaceOp.Activation.name \
                         and ConverterUtil.get_arg(
                             consumer_op,
-                            MaceKeyword.mace_activation_type_str).s != 'PRELU':
+                            MaceKeyword.mace_activation_type_str).s != b'PRELU':  # noqa
                     print("Fold activation: %s(%s)" % (op.name, op.type))
                     op.name = consumer_op.name
                     op.output[0] = consumer_op.output[0]
diff --git a/tools/validate.py b/tools/validate.py
index 9980a0a92c74a19744f9e0fa4d55722c1ff3849c..8b8a9e82c8eccd10243ba1232984c080c873900d 100644
--- a/tools/validate.py
+++ b/tools/validate.py
@@ -64,7 +64,16 @@ def calculate_similarity(u, v, data_type=np.float64):
         u = u.astype(data_type)
     if v.dtype is not data_type:
         v = v.astype(data_type)
-    return np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))
+    u_norm = np.linalg.norm(u)
+    v_norm = np.linalg.norm(v)
+    norm = u_norm * v_norm
+    if norm == 0:
+        if u_norm == 0 and v_norm == 0:
+            return 1
+        else:
+            return 0
+    else:
+        return np.dot(u, v) / norm
 
 
 def calculate_pixel_accuracy(out_value, mace_out_value):
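
Note on the validate.py hunk: the old expression divided by the product of the two norms, so it produced NaN (with a divide-by-zero warning) whenever either tensor was all zeros; the guard returns 1 when both tensors are zero and 0 when only one of them is. The transformer.py hunk compares the protobuf bytes field `.s` against `b'PRELU'` rather than the str `'PRELU'`, which never matches under Python 3. Below is a minimal standalone sketch of the zero-norm guard; the cosine_similarity name and the example values are illustrative, not part of the patch.

import numpy as np


def cosine_similarity(u, v, data_type=np.float64):
    # Flatten and cast, mirroring how validate.py normalizes its inputs.
    u = np.asarray(u, dtype=data_type).ravel()
    v = np.asarray(v, dtype=data_type).ravel()
    u_norm = np.linalg.norm(u)
    v_norm = np.linalg.norm(v)
    norm = u_norm * v_norm
    if norm == 0:
        # Two all-zero tensors are treated as identical; a zero tensor
        # compared with a non-zero one is treated as completely dissimilar.
        return 1.0 if (u_norm == 0 and v_norm == 0) else 0.0
    return np.dot(u, v) / norm


print(cosine_similarity([0, 0], [0, 0]))   # 1.0 instead of NaN
print(cosine_similarity([0, 0], [1, 2]))   # 0.0 instead of NaN
print(cosine_similarity([1, 2], [2, 4]))   # ~1.0, unchanged behavior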