#   Copyright (c) 2020  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
from x2paddle.optimizer.pattern_matcher import FuseBase
from x2paddle.core.program import PaddleGraph, PaddleLayer
from x2paddle.core.util import *


class FcFuser(FuseBase):
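    """Fuse the matched if/else fc sub-graph into a single
    paddle.nn.Linear layer."""
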
    def __init__(self):
        self.linear_index = 0
        super(FcFuser, self).__init__(graph_type="dygraph")

    def build_pattern(self):
        """ 描述需要替换的fc图结构。
        Example Python implementation of the fc-layer pattern:
            x133 = x128.shape
            x133 = len(x133)
            x134 = x133 == 2
            if x134 :
                classifier_6_weight = self.classifier_6_weight
                x136 = fluid.layers.transpose(x=classifier_6_weight, perm=[1, 0])
                classifier_6_bias = self.classifier_6_bias
                x137 = paddle.addmm(input=classifier_6_bias, x=x128, y=x136, beta=1, alpha=1)
                x135 = x137
            else:
                classifier_6_weight = self.classifier_6_weight
                x138 = fluid.layers.transpose(x=classifier_6_weight, perm=[1, 0])
                x139 = fluid.layers.matmul(x=x128, y=x138)
                classifier_6_bias = self.classifier_6_bias
                x140 = x139 + 1 * classifier_6_bias
                x135 = x140
        """

        def gen_name(id):
            return "x" + str(id)

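        # Pattern head: take the input's shape, compute its rank with len(),
        # compare it to 2, and branch on the result with a prim.if layer.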
        self.pattern.add_layer(
            "fluid.layers.shape",
            inputs={'input': "fc-input-0"},
            outputs=[gen_name(2)])
        self.pattern.add_layer(
            "prim.len", inputs={'input': gen_name(2)}, outputs=[gen_name(2)])
        self.pattern.add_layer(
            "prim.eq",
            inputs={"eq0": gen_name(2)},
            outputs=[gen_name(3)],
            eq1=2)
        self.pattern.add_layer("prim.if", {'input': gen_name(3)}, [gen_name(4)])
        self.pattern.outputs.append(gen_name(4))
        if_layer1 = self.pattern.layers[list(self.pattern.layers.keys())[-1]]
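        # Block 0 (rank == 2): load the weight, transpose it, load the bias,
        # then compute the fc output with paddle.addmm.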
        pattern_block0 = PaddleGraph(if_layer1, graph_type="dygraph")
        pattern_block0.add_layer(
            "fluid.dygraph.base.to_variable",
            inputs={},
            outputs=[gen_name(5)],
            value="params[{}]".format(string(gen_name(5))))
        pattern_block0.add_layer(
            "fluid.layers.transpose",
            inputs={"x": gen_name(5)},
            outputs=[gen_name(6)],
            perm=[1, 0])
        pattern_block0.add_layer(
            "fluid.dygraph.base.to_variable",
            inputs={},
            outputs=[gen_name(7)],
            value="params[{}]".format(string(gen_name(7))))
        pattern_block0.add_layer(
            "paddle.addmm",
            inputs={"input": gen_name(7),
                    "x": "fc-input-0",
                    "y": gen_name(6)},
            outputs=[gen_name(8)],
            beta=1,
            alpha=1)
        if_layer1.inputs["input-0"] = "fc-input-0"
        self.pattern.inputs.append("fc-input-0")
        pattern_block0.add_layer(
            "prim.equal", inputs={'input': gen_name(8)}, outputs=[gen_name(4)])
        if_layer1.add_block(pattern_block0)
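        # Block 1 (any other rank): transpose the weight, matmul it with the
        # input, then add the bias.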
        pattern_block1 = PaddleGraph(if_layer1, graph_type="dygraph")
        pattern_block1.add_layer(
            "fluid.dygraph.base.to_variable",
            inputs={},
            outputs=[gen_name(5)],
            value="params[{}]".format(string(gen_name(5))))
        pattern_block1.add_layer(
            "fluid.layers.transpose",
            inputs={"x": gen_name(5)},
            outputs=[gen_name(6)],
            perm=[1, 0])
        pattern_block1.add_layer(
            "paddle.matmul",
            inputs={"x": "fc-input-0",
                    "y": gen_name(6)},
            outputs=[gen_name(9)])
        if_layer1.inputs["input-1"] = "fc-input-0"
        pattern_block1.add_layer(
            "fluid.dygraph.base.to_variable",
            inputs={},
            outputs=[gen_name(12)],
            value="params[{}]".format(string(gen_name(12))))
        pattern_block1.add_layer(
            "prim.add_",
            inputs={"x": gen_name(9),
                    "y": gen_name(12)},
            outputs=[gen_name(13)],
            alpha=1)
        pattern_block1.add_layer(
            "prim.equal", inputs={'input': gen_name(13)},
            outputs=[gen_name(4)])
        if_layer1.add_block(pattern_block1)
        self.pattern.build(inputs={"input-0": "fc-input-0"})

    def insert_new_layer(self, graph, parameters, matches):
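        """Place the fused layer at the id of the first matched layer and pop
        that id from `matches`, so only the remaining matched layers are
        cleaned up afterwards."""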
        new_layer = self.gen_new_layer(parameters, matches)
        new_layer_id = list(matches.keys())[0]
        graph.layers[new_layer_id] = new_layer
        matches.pop(new_layer_id)

    def gen_new_layer(self, parameters, matches):
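        """Build the replacement paddle.nn.Linear layer from the matched
        sub-graph, transposing the weight from (out_features, in_features)
        to Paddle's (in_features, out_features) layout."""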
        layers_id = list(matches.keys())
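        # layers_id[0]: the fluid.layers.shape layer; its input is the fc input.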
        layer = matches[layers_id[0]]
        input_name = layer.inputs["input"]
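        # layers_id[3]: the prim.if layer; its output is the fc output.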
        layer = matches[layers_id[3]]
        output_name = layer.outputs[0]
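        # layers_id[4]: the to_variable layer holding the weight; its "value"
        # attr looks like params["name"], so strip the wrapper to get the name.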
        layer = matches[layers_id[4]]
        weight_name = layer.attrs["value"][8:-2]
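        # layers_id[6]: the to_variable layer holding the bias.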
        layer = matches[layers_id[6]]
        bias_name = layer.attrs["value"][8:-2]
        attrs = dict()
        attrs["in_features"] = parameters[weight_name].shape[1]
        attrs["out_features"] = parameters[weight_name].shape[0]
        linear_name = "linear{}".format(self.linear_index)
        self.linear_index += 1
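        # paddle.nn.Linear expects the weight as (in_features, out_features),
        # so transpose the matched (out_features, in_features) weight.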
        parameters["{}.weight".format(linear_name)] = parameters[
            weight_name].transpose((1, 0))
        parameters["{}.bias".format(linear_name)] = np.squeeze(parameters[
            bias_name])
        new_layer = PaddleLayer(
            layers_id[0],
            "paddle.nn.Linear",
            inputs={"input": input_name},
            outputs=[linear_name, output_name],
            **attrs)
        return new_layer
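

# Usage sketch: this fuser is meant to be applied to a converted PaddleGraph by
# x2paddle's optimizer. Assuming FuseBase exposes an `operate(graph)` entry
# point (an assumption; only build_pattern, insert_new_layer and gen_new_layer
# are defined in this file), applying it would look roughly like:
#
#     fuser = FcFuser()
#     fuser.operate(graph)  # hypothetical call on a converted PaddleGraph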