theseus_layer.py 3.0 KB
Newer Older
W
weishengyu 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13
from abc import ABC
from paddle import nn
import re


class Identity(nn.Layer):
    """A no-op layer: its forward pass returns the input unchanged.

    Used by ``TheseusLayer.stop_after`` to blank out every sub-layer
    that follows the stop layer while keeping the container structure.
    """

    def __init__(self):
        super().__init__()

    def forward(self, inputs):
        """Return *inputs* untouched."""
        return inputs


W
dbg  
weishengyu 已提交
14
class TheseusLayer(nn.Layer):
    """Base layer supporting surgery on its sub-layers.

    Provides three operations on the sub-layer tree:
    * ``stop_after``  - truncate the network after a named layer,
    * ``_update_res`` - collect intermediate outputs via forward hooks,
    * ``replace_sub`` - swap matching sub-layers in place.
    """

    def __init__(self, *args, **kwargs):
        super(TheseusLayer, self).__init__()
        # Maps layer full_name() -> output, filled by forward post-hooks
        # registered in _update_res.
        self.res_dict = {}

    # NOTE: stop doesn't work when the stop layer has a parallel branch.
    def stop_after(self, stop_layer_name: str):
        """Replace every sub-layer after ``stop_layer_name`` with Identity.

        Args:
            stop_layer_name: full_name() of the layer to stop after.

        Returns:
            bool: True if the stop layer was found here or in a nested
            TheseusLayer, False otherwise.
        """
        after_stop = False
        for layer_i in self._sub_layers:
            if after_stop:
                self._sub_layers[layer_i] = Identity()
                continue
            layer_name = self._sub_layers[layer_i].full_name()
            if layer_name == stop_layer_name:
                after_stop = True
                continue
            # Descend into nested TheseusLayers; a hit there also truncates
            # everything after it at this level (via after_stop).
            if isinstance(self._sub_layers[layer_i], TheseusLayer):
                after_stop = self._sub_layers[layer_i].stop_after(
                    stop_layer_name)
        return after_stop

    def _update_res(self, return_patterns):
        """Register hooks so sub-layers whose full_name() matches any regex
        in ``return_patterns`` record their output into ``self.res_dict``.

        Args:
            return_patterns: iterable of regex strings, or None for a no-op.
        """
        # BUGFIX: the original tested `return_patterns is not None` inside
        # `for return_pattern in return_patterns:`, which would already have
        # raised TypeError on None before the guard ran. Check once up front.
        if return_patterns is None:
            return
        for layer_i in self._sub_layers:
            layer_name = self._sub_layers[layer_i].full_name()
            if isinstance(self._sub_layers[layer_i], TheseusLayer):
                for return_pattern in return_patterns:
                    if re.match(return_pattern, layer_name):
                        self._sub_layers[layer_i].register_forward_post_hook(
                            self._sub_layers[layer_i]._save_sub_res_hook)
                        # Share one result dict so nested layers report here.
                        self._sub_layers[layer_i].res_dict = self.res_dict
                        # BUGFIX: stop after the first match so a layer
                        # matched by several patterns is hooked only once.
                        break
                self._sub_layers[layer_i]._update_res(return_patterns)

    def _save_sub_res_hook(self, layer, input, output):
        # Forward post-hook: record this layer's output under its full name.
        if self.res_dict is not None:
            self.res_dict[layer.full_name()] = output

    def replace_sub(self, layer_name_pattern, replace_function, recursive=True):
        """Replace each direct sub-layer whose full_name() matches
        ``layer_name_pattern`` with ``replace_function(old_layer)``.

        Args:
            layer_name_pattern: regex matched (via re.match) against each
                sub-layer's full_name().
            replace_function: callable taking the old layer and returning
                its replacement.
            recursive: when True, also descend into nested TheseusLayer,
                nn.Sequential and nn.LayerList containers.
        """
        for layer_i in self._sub_layers:
            layer_name = self._sub_layers[layer_i].full_name()
            if re.match(layer_name_pattern, layer_name):
                self._sub_layers[layer_i] = replace_function(
                    self._sub_layers[layer_i])
            if recursive:
                if isinstance(self._sub_layers[layer_i], TheseusLayer):
                    self._sub_layers[layer_i].replace_sub(
                        layer_name_pattern, replace_function, recursive)
                elif isinstance(self._sub_layers[layer_i],
                                (nn.Sequential, nn.LayerList)):
                    for layer_j in self._sub_layers[layer_i]._sub_layers:
                        sub_layer = self._sub_layers[layer_i]._sub_layers[
                            layer_j]
                        # BUGFIX: only TheseusLayer defines replace_sub;
                        # calling it on a plain nn.Layer child raised
                        # AttributeError in the original.
                        if isinstance(sub_layer, TheseusLayer):
                            sub_layer.replace_sub(
                                layer_name_pattern, replace_function,
                                recursive)

    '''
    example of replace function:
    def replace_conv(origin_conv: nn.Conv2D):
        new_conv = nn.Conv2D(
            in_channels=origin_conv._in_channels,
            out_channels=origin_conv._out_channels,
            kernel_size=origin_conv._kernel_size,
            stride=2
        )
        return new_conv

        '''