import pgl
import paddle.fluid.layers as L
import pgl.layers.conv as conv

def get_norm(indegree):
    """Compute the symmetric GCN normalization factor 1/sqrt(indegree).

    Zero-degree nodes are masked to 0; the 1e-6 term avoids division by zero.
    """
    norm = L.pow(L.cast(indegree, dtype="float32") + 1e-6, factor=-0.5)
    norm = norm * L.cast(indegree > 0, dtype="float32")
    return norm
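
# A plain-NumPy sketch of the same normalization (illustration only, not used
# by the models below): indegree -> 1/sqrt(indegree), zero-degree nodes get 0.
#
#   import numpy as np
#   indegree = np.array([0, 1, 4])
#   norm = np.power(indegree.astype("float32") + 1e-6, -0.5)
#   norm = norm * (indegree > 0).astype("float32")   # -> [0.0, ~1.0, 0.5]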
    

class GCN(object):
    """Implement of GCN
    """
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.hidden_size = config.get("hidden_size", 64)
        self.dropout = config.get("dropout", 0.5)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        
        for i in range(self.num_layers):

            if phase == "train":
                ngw = pgl.sample.edge_drop(graph_wrapper, self.edge_dropout) 
                norm = get_norm(ngw.indegree())
            else:
                ngw = graph_wrapper
                norm = graph_wrapper.node_feat["norm"]

            feature = L.dropout(
                    feature,
                    self.dropout,
                    dropout_implementation='upscale_in_train')

            feature = pgl.layers.gcn(ngw,
                feature,
                self.hidden_size,
                activation="relu",
                norm=norm,
                name="layer_%s" % i)

        feature = L.dropout(
                    feature,
                    self.dropout,
                    dropout_implementation='upscale_in_train')

        if phase == "train": 
            ngw = pgl.sample.edge_drop(graph_wrapper, self.edge_dropout) 
            norm = get_norm(ngw.indegree())
        else:
            ngw = graph_wrapper
            norm = graph_wrapper.node_feat["norm"]

        feature = conv.gcn(ngw,
                     feature,
                     self.num_class,
                     activation=None,
                     norm=norm,
                     name="output")

        return feature

class GAT(object):
    """Implement of GAT"""
    def __init__(self, config, num_class):
        self.num_class = num_class 
        self.num_layers = config.get("num_layers", 1)
        self.num_heads = config.get("num_heads", 8)
        self.hidden_size = config.get("hidden_size", 8)
        self.feat_dropout = config.get("feat_drop", 0.6)
        self.attn_dropout = config.get("attn_drop", 0.6)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        if phase == "train": 
            edge_dropout = 0
        else:
            edge_dropout = self.edge_dropout

        for i in range(self.num_layers):
            ngw = pgl.sample.edge_drop(graph_wrapper, edge_dropout) 
                
            feature = conv.gat(ngw,
                                feature,
                                self.hidden_size,
                                activation="elu",
                                name="gat_layer_%s" % i,
                                num_heads=self.num_heads,
                                feat_drop=self.feat_dropout,
                                attn_drop=self.attn_dropout)

        ngw = pgl.sample.edge_drop(graph_wrapper, edge_dropout) 
        feature = conv.gat(ngw,
                     feature,
                     self.num_class,
                     num_heads=1,
                     activation=None,
                     feat_drop=self.feat_dropout,
                     attn_drop=self.attn_dropout,
                     name="output")
        return feature

   
class APPNP(object):
    """Implement of APPNP"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)
        self.hidden_size = config.get("hidden_size", 64)
        self.dropout = config.get("dropout", 0.5)
        self.alpha = config.get("alpha", 0.1)
        self.k_hop = config.get("k_hop", 10)
        self.edge_dropout = config.get("edge_dropout", 0.0)

    def forward(self, graph_wrapper, feature, phase):
        if phase == "train": 
            edge_dropout = 0
        else:
            edge_dropout = self.edge_dropout

        for i in range(self.num_layers):
            feature = L.dropout(
                feature,
                self.dropout,
                dropout_implementation='upscale_in_train')
            feature = L.fc(feature, self.hidden_size, act="relu", name="lin%s" % i)

        feature = L.dropout(
            feature,
            self.dropout,
            dropout_implementation='upscale_in_train')

        feature = L.fc(feature, self.num_class, act=None, name="output")

        feature = conv.appnp(graph_wrapper,
            feature=feature,
            edge_dropout=edge_dropout,
            alpha=self.alpha,
            k_hop=self.k_hop)
        return feature

class SGC(object):
    """Implement of SGC"""
    def __init__(self, config, num_class):
        self.num_class = num_class
        self.num_layers = config.get("num_layers", 1)

    def forward(self, graph_wrapper, feature, phase):
        feature = conv.appnp(graph_wrapper,
            feature=feature,
            norm=graph_wrapper.node_feat["norm"],
            alpha=0,
            k_hop=self.num_layers)
        # The propagation step is parameter-free in SGC, so block gradients.
        feature.stop_gradient = True
        feature = L.fc(feature, self.num_class, act=None, name="output")
        return feature
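

# Minimal construction sketch (illustration only; the config keys mirror the
# .get() defaults above and num_class=7 is an arbitrary example value).
# forward() is meant to be called inside a Paddle program, with a PGL graph
# wrapper and node features supplied by the training script.
if __name__ == "__main__":
    config = {"num_layers": 2, "hidden_size": 64,
              "dropout": 0.5, "edge_dropout": 0.1}
    model = GCN(config, num_class=7)
    print(model.num_layers, model.hidden_size, model.edge_dropout)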