Unverified commit 10881b6e, authored by Guanghua Yu, committed by GitHub

fix problem of persistable var saving in QAT (#47178)

Parent: af9486fc
......
@@ -32,6 +32,7 @@
 from paddle.fluid.io import load_inference_model, save_inference_model
 from ..quantization_pass import ReplaceFakeQuantDequantPass, QuantWeightPass
 from paddle.fluid.log_helper import get_logger
 from .. import quantization_pass
+from ..utils import move_persistable_var_to_global_block
 from . import utils
 from . import fuse_utils
......
@@ -552,6 +553,8 @@ class ImperativeQuantizeOutputs(object):
             clip_extra = True
 
+        move_persistable_var_to_global_block(infer_program)
+
         save_inference_model(dirname=dirname,
                              feeded_var_names=feed_target_names,
                              target_vars=fetch_targets,
......
......
@@ -449,21 +449,7 @@ class PostTrainingQuantization(object):
             self._collect_dynamic_quantize_op_threshold(
                 self._dynamic_quantize_op_type)
 
-        # Move sub blocks persistable var to global block
-        global_block = self._program.global_block()
-        for _op in global_block.ops:
-            if _op.type == "while":
-                _block_id = _op.attr("sub_block").id
-                _block = self._program.block(_block_id)
-                persistables = []
-                for _name, _var in _block.vars.items():
-                    if _var.persistable:
-                        global_block._clone_variable(_var)
-                        persistables.append(_name)
-                for _name in persistables:
-                    _block._remove_var(_name)
-                persistables.extend(_op.input('X'))
-                _op.desc.set_input("X", persistables)
+        utils.move_persistable_var_to_global_block(self._program)
 
         if not self._return_graph:
             return self._program
......
......
@@ -435,6 +435,24 @@ def calculate_quant_cos_error(orig_tensor, qdq_tensor):
     return cos_sim
 
 
+def move_persistable_var_to_global_block(program):
+    # Move sub blocks persistable var to global block
+    global_block = program.global_block()
+    for _op in global_block.ops:
+        if _op.type == "while":
+            _block_id = _op.attr("sub_block").id
+            _block = program.block(_block_id)
+            persistables = []
+            for _name, _var in _block.vars.items():
+                if _var.persistable:
+                    global_block._clone_variable(_var)
+                    persistables.append(_name)
+            for _name in persistables:
+                _block._remove_var(_name)
+            persistables.extend(_op.input('X'))
+            _op.desc.set_input("X", persistables)
+
+
 def l2_loss(gt, pred):
     return ((gt - pred)**2).mean()
......
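For reference, here is a minimal, self-contained sketch (not part of the commit) of how the new shared helper would be called before exporting a static-graph model, mirroring the call sites added in this diff. The absolute import path, the toy while_loop/fc network, and the output directory are assumptions for illustration only.

# Illustrative sketch only (not code from this commit): build a toy static
# program containing a while op, hoist any persistable vars out of its
# sub-block, then export with save_inference_model.
# The absolute import path and the toy network are assumptions.
import paddle
import paddle.fluid as fluid
from paddle.fluid.contrib.slim.quantization.utils import (
    move_persistable_var_to_global_block,
)

paddle.enable_static()

main_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
    x = fluid.data(name='x', shape=[None, 8], dtype='float32')
    i = paddle.full(shape=[1], fill_value=0, dtype='int64')
    limit = paddle.full(shape=[1], fill_value=4, dtype='int64')

    def cond(i, x):
        return i < limit

    def body(i, x):
        return i + 1, x * 2.0

    i, x = paddle.static.nn.while_loop(cond, body, [i, x])
    y = fluid.layers.fc(input=x, size=4)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(startup_prog)

# Persistable vars that earlier passes may have left inside the while
# sub-block are cloned into the global block, so save_inference_model can
# serialize them together with the rest of the weights.
move_persistable_var_to_global_block(main_prog)

fluid.io.save_inference_model(
    dirname='./quant_infer_model',   # placeholder output directory
    feeded_var_names=['x'],
    target_vars=[y],
    executor=exe,
    main_program=main_prog)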