#   Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import six
import paddle
from ..core import GraphWrapper

__all__ = ['recover_inference_program']


def _remove_fetch_node(program):
    """Remove fetch ops from the program."""
    for block in program.blocks:
        removed = 0
        ops = list(block.ops)
        for op in ops:
            if op.type == "fetch":
                idx = ops.index(op)
                # Op indices shift left after each removal, so offset the
                # index by the number of ops already removed from this block.
                block._remove_op(idx - removed)
                removed += 1


def _recover_outputs_attr(program):
    """Add the outputs which is only used for training and not saved in
       inference program."""
    for block_idx in six.moves.range(program.num_blocks):
        block = program.block(block_idx)
        for op in block.ops:
            # batch_norm needs a ReserveSpace output when running in training
            # mode; re-create it if the inference program dropped it.
            if op.type == "batch_norm":
                if "ReserveSpace" not in op.output_names or len(
                        op.output("ReserveSpace")) == 0:
                    reserve_space = block.create_var(
                        name=paddle.fluid.unique_name.
                        generate_with_ignorable_key(".".join(
                            ["reserve_space", 'tmp'])),
                        dtype=block.var(op.input("X")[0]).dtype,
                        type=paddle.framework.core.VarDesc.VarType.LOD_TENSOR,
                        persistable=False,
                        stop_gradient=True)
                    op.desc.set_output("ReserveSpace", [reserve_space.name])
            # transpose2 records the input shape in an XShape output for the
            # backward pass; add it back if the inference program removed it.
            if op.type == 'transpose2':
                if 'XShape' not in op.output_names:
                    xshape = block.create_var(
                        name=paddle.fluid.unique_name.
                        generate_with_ignorable_key(".".join(["xshape", 'tmp'
                                                              ])),
                        dtype=block.var(op.input("X")[0]).dtype,
                        type=paddle.framework.core.VarDesc.VarType.LOD_TENSOR,
                        shape=(0, ) + block.var(op.input("X")[0]).shape,
                        persistable=False,
                        stop_gradient=True)
                    op.desc.set_output("XShape", [xshape.name])
    return program


def _recover_param_attr(program):
    """recover parameters attribute.
       Params in infermodel are stored in the form of variable, which can not be trained."""
    all_weights = [param for param in program.list_vars() \
        if param.persistable is True and param.name != 'feed' and param.name != 'fetch']
    with paddle.static.program_guard(program):
        for w in all_weights:
            # Re-register each persistable variable as a trainable parameter
            # with the same name, shape and dtype, keeping its current value.
            new_w = paddle.create_parameter(
                shape=w.shape, dtype=w.dtype, name=w.name)
            new_w.set_value(w.get_value())
            program.block(0).vars[w.name] = new_w
    return program


def recover_inference_program(inference_program):
    """  recover inference program to train program which can be trained. """
    _remove_fetch_node(inference_program)
    inference_program = _recover_param_attr(inference_program)
    inference_program = _recover_outputs_attr(inference_program)
    # Allow gradients to flow through every variable.
    for var in inference_program.list_vars():
        var.stop_gradient = False

    # Switch ops such as batch_norm and dropout back to training behavior.
    for op in inference_program.global_block().ops:
        op._set_attr("is_test", False)

    return inference_program
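

# Example usage (a minimal sketch, not part of this module's public API).
# It assumes a Paddle 2.x static-graph setup and a hypothetical inference
# model saved under "./inference_model":
#
#     import paddle
#     paddle.enable_static()
#     exe = paddle.static.Executor(paddle.CPUPlace())
#     [infer_prog, feed_names, fetch_targets] = (
#         paddle.static.load_inference_model("./inference_model", exe))
#     train_prog = recover_inference_program(infer_prog)
#     # train_prog is now trainable: parameters are restored, is_test is
#     # turned off, and gradients can flow through all variables.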