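"""Model zoo for node classification with PGL: GCN, GAT, APPNP, SGC and GCNII.

Each model reads its hyperparameters from a config dict (with .get defaults)
and exposes forward(graph_wrapper, feature, phase) built on the static
paddle.fluid graph API.
"""
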
import pgl
import paddle.fluid.layers as L
import pgl.layers.conv as conv

def get_norm(indegree):
    """Symmetric GCN normalization: norm = clamp(indegree, 1)^(-1/2)."""
    float_degree = L.cast(indegree, dtype="float32")
    # Clamp to at least 1 so isolated nodes do not yield inf under pow(-0.5).
    float_degree = L.clamp(float_degree, min=1.0)
    norm = L.pow(float_degree, factor=-0.5)
    return norm

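# A short sketch (illustrative, not part of this file) of how the precomputed
# "norm" node feature read by the eval branches below could be built at
# preprocessing time, assuming `graph` is a pgl.graph.Graph:
#
#   import numpy as np
#   degree = np.maximum(graph.indegree().astype("float32"), 1)
#   graph.node_feat["norm"] = np.expand_dims(np.power(degree, -0.5), -1)
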
class GCN(object):
    """Implement of GCN
    """
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.hidden_size = config.get("hidden_size", 64)
        self.dropout = config.get("dropout", 0.5)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        for i in range(self.num_layers):
            if phase == "train":
                # Edge dropout changes node degrees, so the normalization is
                # recomputed on the thinned graph; at eval time the norm
                # precomputed in node_feat["norm"] is reused instead.
                ngw = pgl.sample.edge_drop(graph_wrapper, self.edge_dropout)
                norm = get_norm(ngw.indegree())
            else:
                ngw = graph_wrapper
                norm = graph_wrapper.node_feat["norm"]

            feature = conv.gcn(ngw,
                feature,
                self.hidden_size,
                activation="relu",
                norm=norm,
                name="layer_%s" % i)

            feature = L.dropout(
                    feature,
                    self.dropout,
                    dropout_implementation='upscale_in_train')

        if phase == "train":
            ngw = pgl.sample.edge_drop(graph_wrapper, self.edge_dropout)
            norm = get_norm(ngw.indegree())
        else:
            ngw = graph_wrapper
            norm = graph_wrapper.node_feat["norm"]

        feature = conv.gcn(ngw,
                     feature,
                     self.num_class,
                     activation=None,
                     norm=norm,
                     name="output")

        return feature
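
# A minimal usage sketch (hedged): `gw` is assumed to be a
# pgl.graph_wrapper.GraphWrapper whose node_feat contains the precomputed
# "norm", `feat` its node-feature tensor, and 7 a hypothetical class count;
# the config keys mirror the .get() defaults in __init__:
#
#   model = GCN({"num_layers": 2, "hidden_size": 64, "dropout": 0.5,
#                "edge_dropout": 0.1}, num_class=7)
#   logits = model.forward(gw, feat, phase="train")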

class GAT(object):
    """Implement of GAT"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.num_heads = config.get("num_heads", 8)
        self.hidden_size = config.get("hidden_size", 8)
        self.feat_dropout = config.get("feat_drop", 0.6)
        self.attn_dropout = config.get("attn_drop", 0.6)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        # Edge dropout is applied only during training.
        if phase == "train":
            edge_dropout = self.edge_dropout
        else:
            edge_dropout = 0

        for i in range(self.num_layers):
            ngw = pgl.sample.edge_drop(graph_wrapper, edge_dropout)

            feature = conv.gat(ngw,
                                feature,
                                self.hidden_size,
                                activation="elu",
                                name="gat_layer_%s" % i,
                                num_heads=self.num_heads,
                                feat_drop=self.feat_dropout,
                                attn_drop=self.attn_dropout)
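
            # Note: following the standard GAT architecture, each hidden layer
            # concatenates its num_heads attention heads, so the hidden width
            # grows to hidden_size * num_heads (8 * 8 = 64 with the defaults
            # above); the single-head output layer below maps it to num_class.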

        ngw = pgl.sample.edge_drop(graph_wrapper, edge_dropout)
        feature = conv.gat(ngw,
                     feature,
                     self.num_class,
                     num_heads=1,
                     activation=None,
                     feat_drop=self.feat_dropout,
                     attn_drop=self.attn_dropout,
                     name="output")
        return feature

class APPNP(object):
    """Implement of APPNP"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.hidden_size = config.get("hidden_size", 64)
        self.dropout = config.get("dropout", 0.5)
        self.alpha = config.get("alpha", 0.1)
        self.k_hop = config.get("k_hop", 10)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        # Edge dropout is applied only during training.
        if phase == "train":
            edge_dropout = self.edge_dropout
        else:
            edge_dropout = 0

        # "Predict then propagate": an MLP computes per-node predictions
        # first, then conv.appnp propagates them over the graph.
        for i in range(self.num_layers):
            feature = L.dropout(
                feature,
                self.dropout,
                dropout_implementation='upscale_in_train')
            feature = L.fc(feature, self.hidden_size, act="relu", name="lin%s" % i)

        feature = L.dropout(
            feature,
            self.dropout,
            dropout_implementation='upscale_in_train')

        feature = L.fc(feature, self.num_class, act=None, name="output")

        feature = conv.appnp(graph_wrapper,
            feature=feature,
            edge_dropout=edge_dropout,
            alpha=self.alpha,
            k_hop=self.k_hop)
        return feature
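
# conv.appnp implements the personalized-PageRank propagation
#   H(0) = MLP(X),  H(t+1) = (1 - alpha) * A_hat * H(t) + alpha * H(0)
# iterated k_hop times, with A_hat the normalized adjacency matrix.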

class SGC(object):
    """Implement of SGC"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)

    def forward(self, graph_wrapper, feature, phase):
        feature = conv.appnp(graph_wrapper,
            feature=feature,
            edge_dropout=0,
            alpha=0,
            k_hop=self.num_layers)
        # With alpha=0 the propagation is just num_layers applications of the
        # normalized adjacency; stopping gradients turns it into a fixed
        # feature preprocessing, so only the linear layer below is trained.
        feature.stop_gradient = True
        feature = L.fc(feature, self.num_class, act=None, bias_attr=False, name="output")
        return feature
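
# SGC ("Simplifying Graph Convolutional Networks") drops GCN's nonlinearities,
# collapsing the model to logits = A_hat^K * X * W, which is exactly the fixed
# propagation plus the single fc layer above.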

class GCNII(object):
    """Implement of GCNII"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.hidden_size = config.get("hidden_size", 64)
        self.dropout = config.get("dropout", 0.6)
        self.alpha = config.get("alpha", 0.1)
        self.lambda_l = config.get("lambda_l", 0.5)
        self.k_hop = config.get("k_hop", 64)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        # Edge dropout is resolved per-phase for parity with the other models,
        # though conv.gcnii below does not currently consume it.
        if phase == "train":
            edge_dropout = self.edge_dropout
        else:
            edge_dropout = 0

        for i in range(self.num_layers):
            feature = L.fc(feature, self.hidden_size, act="relu", name="lin%s" % i)
            feature = L.dropout(
                feature,
                self.dropout,
                dropout_implementation='upscale_in_train')

        feature = conv.gcnii(graph_wrapper,
            feature=feature,
            name="gcnii",
            activation="relu",
            lambda_l=self.lambda_l,
            alpha=self.alpha,
            dropout=self.dropout,
            k_hop=self.k_hop)

        feature = L.fc(feature, self.num_class, act=None, name="output")
        return feature
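
# The gcnii layer combines an initial-residual connection with identity
# mapping:
#   H(l+1) = act( ((1 - alpha) * A_hat * H(l) + alpha * H(0))
#                 * ((1 - beta_l) * I + beta_l * W(l)) ),
# where beta_l = log(lambda_l / l + 1); alpha and lambda_l are the knobs
# exposed in the config above.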