Unverified · Commit 79384c60 authored by Bai Yifan, committed by GitHub

Fix quant aware (#431) (#458)

Parent 8dd809d9
@@ -185,7 +185,8 @@ def quant_aware(program,
                 weight_preprocess_func=None,
                 act_preprocess_func=None,
                 optimizer_func=None,
-                executor=None):
+                executor=None,
+                return_program=False):
     """Add quantization and dequantization operators to "program"
     for quantization training or testing.
@@ -226,6 +227,8 @@ def quant_aware(program,
             quantization function and preprocess function, this function must be set. Default is None.
         exe(Fluid.Executor): If user want to use self-defined quantization function and preprocess function, exe must be set for
             initialization. Default is None.
+        return_program(bool): If user want return value is a Program rather than Compiled Program, This argument should be set True.
+            Default is False.
     Returns:
         fluid.CompiledProgram | fluid.Program: Program with quantization and dequantization ``operators``
     """
@@ -291,7 +294,7 @@ def quant_aware(program,
                     VARS_MAPPING_TABLE))
         save_dict(main_graph.out_node_mapping_table)
-    if for_test:
+    if for_test or return_program:
         quant_program = main_graph.to_program()
     else:
         quant_program = fluid.CompiledProgram(main_graph.graph)
...
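Net effect of the change: when `return_program=True` (or `for_test=True`), `quant_aware` converts the quantized graph back into a `fluid.Program` via `main_graph.to_program()` instead of wrapping it in a `fluid.CompiledProgram`. Below is a minimal usage sketch; the toy network, variable names, and training setup are illustrative and not part of this commit, and the call assumes the `paddleslim.quant.quant_aware` signature shown in the hunks above.

```python
import paddle.fluid as fluid
from paddleslim.quant import quant_aware

# Toy training program (illustrative only; any fluid Program works here).
train_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(train_program, startup_program):
    image = fluid.data(name='image', shape=[None, 1, 28, 28], dtype='float32')
    label = fluid.data(name='label', shape=[None, 1], dtype='int64')
    out = fluid.layers.fc(input=image, size=10, act='softmax')
    loss = fluid.layers.mean(
        fluid.layers.cross_entropy(input=out, label=label))
    fluid.optimizer.Adam(learning_rate=1e-3).minimize(loss)

place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(startup_program)

# With return_program=True the result is a fluid.Program, which can be saved
# or transformed further before being compiled; without it (and with
# for_test=False) the function returns a fluid.CompiledProgram as before.
quant_train_program = quant_aware(
    train_program, place, for_test=False, return_program=True)
```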