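"""
Network configuration of DSSM (Deep Structured Semantic Model).

The DSSM class builds a semantic vector generator (fc, cnn or rnn, selected
by `model_arch`) and one of three task heads (classification, regression or
pairwise rank, selected by `model_type`).
"""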
from paddle import v2 as paddle
from paddle.v2.attr import ParamAttr
from utils import TaskType, logger, ModelType, ModelArch


class DSSM(object):
    def __init__(self,
                 dnn_dims=[],
                 vocab_sizes=[],
                 model_type=ModelType.create_classification(),
                 model_arch=ModelArch.create_cnn(),
                 share_semantic_generator=False,
                 class_num=None,
                 share_embed=False,
                 is_infer=False):
        '''
        @dnn_dims: list of int
            dimensions of each layer in the semantic vector generator.
        @vocab_sizes: 2-d tuple
            vocabulary sizes of the left and right inputs.
        @model_type: int
            type of task, should be 'rank: 0', 'regression: 1' or 'classification: 2'.
        @model_arch: int
            model architecture, representing 'fc', 'cnn' or 'rnn'.
        @share_semantic_generator: bool
            whether to share the semantic vector generator for both left and right.
        @share_embed: bool
            whether to share the embeddings between left and right.
        @class_num: int
            number of categories.
        @is_infer: bool
            whether the network is built for inference.
        '''
        assert len(vocab_sizes) == 2, (
            "vocab_sizes specifies the vocabulary sizes of the left and right "
            "inputs, so its length should be 2.")
        assert len(dnn_dims) > 1, "at least two layer dimensions are needed."

        self.dnn_dims = dnn_dims
        self.vocab_sizes = vocab_sizes
        self.share_semantic_generator = share_semantic_generator
        self.share_embed = share_embed
        self.model_type = ModelType(model_type)
        self.model_arch = ModelArch(model_arch)
        self.class_num = class_num
        self.is_infer = is_infer
        logger.warning("build DSSM model with model type: %s, architecture: %s" %
                       (self.model_type, self.model_arch))
        logger.info("vocabulary sizes: %s" % str(self.vocab_sizes))

        # bind model architecture
        _model_arch = {
            'cnn': self.create_cnn,
            'fc': self.create_fc,
            'rnn': self.create_rnn,
        }

        def _model_arch_creater(emb, prefix=''):
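            # look up the architecture-specific sentence encoder (cnn/fc/rnn),
            # then stack the shared DNN on top of its output.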
            sent_vec = _model_arch.get(str(model_arch))(emb, prefix)
            dnn = self.create_dnn(sent_vec, prefix)
            return dnn

        self.model_arch_creater = _model_arch_creater

        # build model type
        _model_type = {
            'classification': self._build_classification_model,
            'rank': self._build_rank_model,
            'regression': self._build_regression_model,
        }
        logger.info("model type: %s" % str(self.model_type))
        self.model_type_creater = _model_type[str(self.model_type)]

    def __call__(self):
        return self.model_type_creater()

    def create_embedding(self, input, prefix=''):
        '''
        Create an embedding table whose name has a `prefix`.
        '''
        logger.info("create embedding table [%s] which dimention is %d" %
                    (prefix, self.dnn_dims[0]))
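        # dnn_dims[0] is the embedding dimension; naming the parameter
        # '<prefix>_emb.w' lets two inputs share one table when they are
        # created with the same prefix.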
        emb = paddle.layer.embedding(
            input=input,
            size=self.dnn_dims[0],
            param_attr=ParamAttr(name='%s_emb.w' % prefix))
        return emb

    def create_fc(self, emb, prefix=''):
        '''
        A multi-layer fully connected neural network.

        @emb: paddle.layer
            output of the embedding layer
        @prefix: str
            prefix of layers' names, used to share parameters between
            more than one `fc` part.
        '''
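        # max-pool the word embeddings over the sequence to obtain a
        # fixed-size sentence representation.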
        _input_layer = paddle.layer.pooling(
            input=emb, pooling_type=paddle.pooling.Max())
        fc = paddle.layer.fc(
            input=_input_layer,
            size=self.dnn_dims[1],
            param_attr=ParamAttr(name='%s_fc.w' % prefix),
            bias_attr=ParamAttr(name="%s_fc.b" % prefix))
        return fc

    def create_rnn(self, emb, prefix=''):
        '''
        A GRU sentence vector learner.
        '''
        gru = paddle.networks.simple_gru(
            input=emb,
            size=self.dnn_dims[1],
            mixed_param_attr=ParamAttr(name='%s_gru_mixed.w' % prefix),
            mixed_bias_param_attr=ParamAttr(name="%s_gru_mixed.b" % prefix),
            gru_param_attr=ParamAttr(name='%s_gru.w' % prefix),
            gru_bias_attr=ParamAttr(name="%s_gru.b" % prefix))
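        # take the GRU output at the last time step as the sentence vector.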
        sent_vec = paddle.layer.last_seq(gru)
        return sent_vec

    def create_cnn(self, emb, prefix=''):
        '''
        A CNN that applies two parallel sequence_conv_pool groups with
        context widths 3 and 4.

        @emb: paddle.layer
            output of the embedding layer
        @prefix: str
            prefix of layers' names, used to share parameters between
            more than one `cnn` part.
        '''

        def create_conv(context_len, hidden_size, prefix):
            key = "%s_%d_%d" % (prefix, context_len, hidden_size)
            conv = paddle.networks.sequence_conv_pool(
                input=emb,
                context_len=context_len,
                hidden_size=hidden_size,
                # set parameter attr for parameter sharing
                context_proj_param_attr=ParamAttr(name=key + '_context_proj.w'),
                fc_param_attr=ParamAttr(name=key + '_fc.w'),
                fc_bias_attr=ParamAttr(name=key + '_fc.b'),
                pool_bias_attr=ParamAttr(name=key + '_pool.b'))
            return conv

        logger.info('create a sequence_conv_pool whose context width is 3')
        conv_3 = create_conv(3, self.dnn_dims[1], "cnn")
        logger.info('create a sequence_conv_pool whose context width is 4')
        conv_4 = create_conv(4, self.dnn_dims[1], "cnn")

        return conv_3, conv_4

    def create_dnn(self, sent_vec, prefix):
        # if dnn_dims contains more than one dimension, stack a fc layer for
        # every dimension after the first one (which is the embedding size).
        if len(self.dnn_dims) > 1:
            _input_layer = sent_vec
            for id, dim in enumerate(self.dnn_dims[1:]):
                name = "%s_fc_%d_%d" % (prefix, id, dim)
                logger.info("create fc layer [%s] which dimention is %d" %
                            (name, dim))
                fc = paddle.layer.fc(
                    input=_input_layer,
                    size=dim,
                    act=paddle.activation.Tanh(),
                    param_attr=ParamAttr(name='%s.w' % name),
                    bias_attr=ParamAttr(name='%s.b' % name))
                _input_layer = fc
        return _input_layer

    def _build_classification_model(self):
        logger.info("build classification model")
        assert self.model_type.is_classification()
        return self._build_classification_or_regression_model(
            is_classification=True)

    def _build_regression_model(self):
        logger.info("build regression model")
        assert self.model_type.is_regression()
        return self._build_classification_or_regression_model(
            is_classification=False)

    def _build_rank_model(self):
        '''
        Build a pairwise rank model, and the cost is returned.

        A pairwise rank model has 4 inputs:
          - source sentence
          - left_target sentence
          - right_target sentence
          - label, 1 if left_target should be sorted in front of
                   right_target, otherwise 0.
        The label input is only created when the model is built for training;
        in inference mode only the score of (source, right_target) is returned.
        '''
        logger.info("build rank model")
        assert self.model_type.is_rank()
        source = paddle.layer.data(
            name='source_input',
            type=paddle.data_type.integer_value_sequence(self.vocab_sizes[0]))
        left_target = paddle.layer.data(
            name='left_target_input',
            type=paddle.data_type.integer_value_sequence(self.vocab_sizes[1]))
        right_target = paddle.layer.data(
            name='right_target_input',
            type=paddle.data_type.integer_value_sequence(self.vocab_sizes[1]))
        if not self.is_infer:
            label = paddle.layer.data(
                name='label_input', type=paddle.data_type.integer_value(1))

        # the three inputs (source, left_target, right_target) share one prefix
        # when the corresponding component is shared; otherwise the two targets
        # share a 'target' prefix and the source gets its own.
        prefixs = ('_ _ _'.split() if self.share_semantic_generator else
                   'source target target'.split())
        embed_prefixs = ('_ _ _'.split() if self.share_embed else
                         'source target target'.split())

        word_vecs = []
        for id, input in enumerate([source, left_target, right_target]):
            x = self.create_embedding(input, prefix=embed_prefixs[id])
            word_vecs.append(x)

        semantics = []
        for id, input in enumerate(word_vecs):
            x = self.model_arch_creater(input, prefix=prefixs[id])
            semantics.append(x)

        # cossim score of source and left_target
        left_score = paddle.layer.cos_sim(semantics[0], semantics[1])
        # cossim score of source and right target
        right_score = paddle.layer.cos_sim(semantics[0], semantics[2])

        if not self.is_infer:
            # rank cost
            cost = paddle.layer.rank_cost(left_score, right_score, label=label)
            # prediction = left_score - right_score
            # but this operator is not supported currently,
            # so AUC is not computed here.
            return cost, None, label
        return right_score

    def _build_classification_or_regression_model(self, is_classification):
        '''
        Build a classification/regression model, and the cost is returned.

        A classification/regression model has 3 inputs:
          - source sentence
          - target sentence
          - label (a class id for classification, a real value for regression)

        '''
        if is_classification:
            # class_num is needed to size the softmax output layer.
            assert self.class_num

        source = paddle.layer.data(
            name='source_input',
            type=paddle.data_type.integer_value_sequence(self.vocab_sizes[0]))
        target = paddle.layer.data(
            name='target_input',
            type=paddle.data_type.integer_value_sequence(self.vocab_sizes[1]))
        label = paddle.layer.data(
            name='label_input',
            type=paddle.data_type.integer_value(self.class_num)
            if is_classification else paddle.data_type.dense_vector(1))

        prefixs = ('_ _'.split() if self.share_semantic_generator else
                   'source target'.split())
        embed_prefixs = ('_ _'.split() if self.share_embed else
                         'source target'.split())

        word_vecs = []
        for id, input in enumerate([source, target]):
            x = self.create_embedding(input, prefix=embed_prefixs[id])
            word_vecs.append(x)

        semantics = []
        for id, input in enumerate(word_vecs):
            x = self.model_arch_creater(input, prefix=prefixs[id])
            semantics.append(x)

        if is_classification:
            concated_vector = paddle.layer.concat(semantics)
            prediction = paddle.layer.fc(
                input=concated_vector,
                size=self.class_num,
                act=paddle.activation.Softmax())
            cost = paddle.layer.classification_cost(
                input=prediction, label=label)
        else:
            prediction = paddle.layer.cos_sim(*semantics)
            cost = paddle.layer.square_error_cost(prediction, label)

        if not self.is_infer:
            return cost, prediction, label
        return prediction
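

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only): the layer dimensions, vocabulary
# sizes and class number below are assumed values, not taken from any dataset.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    paddle.init(use_gpu=False, trainer_count=1)
    dssm = DSSM(
        dnn_dims=[256, 128, 64],
        vocab_sizes=[10000, 10000],
        model_type=ModelType.create_classification(),
        model_arch=ModelArch.create_cnn(),
        class_num=2)
    # for a classification model, __call__ returns (cost, prediction, label).
    cost, prediction, label = dssm()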