Commit 9a12fab4 authored by chenfei

fix bug of freeze_bn default value

Parent commit: e9eee16e
......
@@ -476,7 +476,7 @@ def export(network, *inputs, file_name, mean=127.5, std_dev=127.5, file_format='
 def convert_quant_network(network,
                           bn_fold=True,
-                          freeze_bn=1e7,
+                          freeze_bn=10000000,
                           quant_delay=(0, 0),
                           num_bits=(8, 8),
                           per_channel=(False, False),
......
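
For context (not stated in the diff itself): freeze_bn is a step count after which the BatchNorm statistics are frozen, so an integer default is expected; the old default 1e7 is a Python float, which is what this commit corrects. Below is a minimal usage sketch under that assumption, not code from this commit; the LeNet5Fusion import path is assumed from the LeNet quantization example touched in the second hunk.

    from mindspore.train.quant import quant
    from src.lenet_fusion import LeNet5Fusion  # import path assumed from the example project

    # Build the fusion network (10 classes, as in the MNIST LeNet example) and
    # convert it to a quantization-aware network. With this fix, the default
    # freeze_bn is the integer 10000000 rather than the float 1e7.
    network = LeNet5Fusion(10)
    network = quant.convert_quant_network(network)
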
......
@@ -50,7 +50,8 @@ if __name__ == "__main__":
     # define fusion network
     network = LeNet5Fusion(cfg.num_classes)
     # convert fusion network to quantization aware network
-    network = quant.convert_quant_network(network, quant_delay=0, bn_fold=False, freeze_bn=10000)
+    network = quant.convert_quant_network(network, quant_delay=0, bn_fold=False, freeze_bn=10000,
+                                          per_channel=[True, False])
     # define loss
     net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
......
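
A further contextual note, also not part of the commit: the paired defaults in the signature above (num_bits=(8, 8), per_channel=(False, False), ...) are commonly read as one entry for weights and one for activations, so per_channel=[True, False] would request per-channel quantization for weights and per-layer quantization for activations. A hedged sketch of the example call with that reading spelled out in comments:

    # Sketch only; the (weights, activations) ordering is an assumption based on
    # the tuple defaults shown in the first hunk.
    network = quant.convert_quant_network(network,
                                          quant_delay=0,              # start fake-quantization immediately
                                          bn_fold=False,              # keep BatchNorm unfolded
                                          freeze_bn=10000,            # freeze BN statistics after 10000 steps
                                          per_channel=[True, False])  # weights per-channel, activations per-layer
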