#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np

import paddle
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import math

from op_test import OpTest, skip_check_grad_ci

np.random.seed(100)


def find_latest_set(num):
    """Return the 1-based position of the highest set bit of ``num``."""
    highest_power = int(math.floor(math.log(num, 2)))
    return highest_power + 1


class CodeTable(object):
    """Default code table for a label in a complete binary tree.

    The code of class ``code`` is derived from ``num_classes + code``;
    its bit pattern addresses the non-leaf nodes on the root-to-leaf path.
    """

    def __init__(self, num_classes, code):
        self.c = num_classes + code

    def cal_index(self, bit):
        # Index of the non-leaf node visited at depth ``bit``.
        shifted = self.c >> (bit + 1)
        return shifted - 1

    def get_length(self):
        # Number of bits in the code, i.e. the depth of the leaf.
        return find_latest_set(self.c) - 1

    def cal_bit(self, bit):
        # Non-zero when the path branches right at depth ``bit``.
        return self.c & (1 << bit)


class CodeTableWithCustomTree(object):
    """Code table backed by user-supplied path tables.

    ``path_table[i]`` lists the non-leaf node ids on sample ``i``'s
    root-to-leaf path and ``path_code[i]`` the branch bit taken at each
    step; both rows are padded with -1 past the end of the path.
    """

    def __init__(self, path_table, path_code, index):
        self.ptable_ = path_table
        self.pcode_ = path_code
        self.index_ = index

    def cal_index(self, bit):
        # Non-leaf node id at step ``bit`` of this sample's path.
        return self.ptable_[self.index_][bit]

    def get_length(self):
        # The path ends at the first -1 padding entry.
        length = 0
        for node in self.ptable_[self.index_]:
            if node < 0:
                break
            length += 1
        return length

    def cal_bit(self, bit):
        # Branch bit at step ``bit`` of this sample's path.
        return self.pcode_[self.index_][bit]


def hsigmoid(x, w, label, bias, num_classes):
    """Numpy reference forward pass of hierarchical sigmoid.

    Uses the default complete-binary-tree code table derived from
    ``num_classes``.  Returns ``(pre_output, out)`` matching the op's
    ``PreOut`` and ``Out`` outputs.
    """
    batch_size = x.shape[0]
    code_length = find_latest_set(num_classes - 1)
    pre_output = np.zeros((batch_size, code_length)).astype('float64')
    out = np.zeros((batch_size, 1)).astype('float64')
    # Accumulate bias plus the inner product along each sample's path.
    for row in range(batch_size):
        table = CodeTable(num_classes, label[row])
        for step in range(table.get_length()):
            node = table.cal_index(step)
            pre_output[row][step] += bias[node][0] + np.dot(w[node], x[row])
    # clip[-40.0, 40.0]
    pre_output = np.clip(pre_output, -40.0, 40.0)
    # out(i, 0) = \sum_j  bit(i, j) * preout(i, j)
    for row in range(batch_size):
        table = CodeTable(num_classes, label[row])
        acc = 0.0
        for step in range(table.get_length()):
            if table.cal_bit(step):
                acc += pre_output[row][step]
        out[row] = -1.0 * acc
    # soft relu
    pre_output = np.log(1 + np.exp(pre_output))
    out += pre_output.sum(1).reshape((batch_size, 1))
    return pre_output, out


def hsigmoid_grad(x, w, label, bias, num_classes):
    """Numpy reference gradients of hierarchical sigmoid.

    Returns ``[dx, dw, db]`` (gradients w.r.t. X, W and Bias), averaged
    over the batch.
    """
    batch_size = x.shape[0]
    dx = np.zeros(x.shape).astype('float64')
    dw = np.zeros(w.shape).astype('float64')
    db = np.zeros(bias.shape).astype('float64')
    for row in range(batch_size):
        table = CodeTable(num_classes, label[row])
        for step in range(table.get_length()):
            node = table.cal_index(step)
            # Sigmoid of the pre-activation: gradient of soft_relu(pre_out).
            sig = 1 / (1 + np.exp(-(np.dot(w[node], x[row]) + bias[node])))
            dx[row] += sig * w[node]
            dw[node] += sig * x[row]
            db[node] += sig
            if table.cal_bit(step):
                # Bits on the path contribute the extra -pre_out term.
                dx[row] -= w[node]
                dw[node] -= x[row]
                db[node] -= 1
    dx /= batch_size
    dw /= batch_size
    db /= batch_size
    return [dx, dw, db]


def hsigmoidWithCustomTree(x, w, path_table, path_code, label, bias,
                           num_classes):
    """Numpy reference forward pass with a user-supplied tree.

    ``path_table``/``path_code`` describe each sample's root-to-leaf path
    (padded with -1), so ``label`` and ``num_classes`` are not consulted
    here.  ``bias`` may be None, in which case no bias term is added.
    Returns ``(pre_output, out)``.
    """
    batch_size = x.shape[0]
    code_length = len(path_table[0])
    add_bias = isinstance(bias, np.ndarray)
    # pre_out has shape [N, code_length]
    pre_output = np.zeros((batch_size, code_length)).astype('float64')
    out = np.zeros((batch_size, 1)).astype('float64')
    for row in range(batch_size):
        table = CodeTableWithCustomTree(path_table, path_code, row)
        for step in range(table.get_length()):
            node = table.cal_index(step)
            term = np.dot(w[node], x[row])
            if add_bias:
                term = bias[node][0] + term
            pre_output[row][step] += term
    # clip[-40.0, 40.0]
    pre_output = np.clip(pre_output, -40.0, 40.0)
    # out(i, 0) = \sum_j  bit(i, j) * preout(i, j)
    for row in range(batch_size):
        table = CodeTableWithCustomTree(path_table, path_code, row)
        acc = 0.0
        for step in range(table.get_length()):
            if table.cal_bit(step):
                acc += pre_output[row][step]
        out[row] = -1.0 * acc
    # soft relu
    pre_output = np.log(1 + np.exp(pre_output))
    out += pre_output.sum(1).reshape((batch_size, 1))
    return pre_output, out


class TestHSigmoidOp(OpTest):
    """Checks dense hierarchical_sigmoid against the numpy reference,
    using the default code table built from ``num_classes``."""

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        num_classes = 101
        feature_size = 5
        batch_size = 20
        x = np.random.uniform(-1, 1,
                              (batch_size, feature_size)).astype('float64')
        w = np.random.uniform(-1, 1,
                              (num_classes - 1, feature_size)).astype('float64')
        label = np.random.randint(0, num_classes,
                                  (batch_size, 1)).astype('int64')
        bias = np.random.uniform(-1, 1, (num_classes - 1, 1)).astype('float64')
        self.attrs = {'num_classes': num_classes, 'is_sparse': False}
        self.inputs = {'X': x, 'W': w, 'Label': label, 'Bias': bias}
        pre_output, out = hsigmoid(x, w, label, bias, num_classes)
        self.outputs = {'PreOut': pre_output, 'Out': out}
        # Analytic numpy gradients, used instead of numeric differentiation.
        self.user_grads = hsigmoid_grad(x, w, label, bias, num_classes)

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(
            ['X', 'W', 'Bias'], ['Out'], user_defined_grads=self.user_grads)


@skip_check_grad_ci(
    reason="For 'TestHSigmoidOpSparse', check_grad is separately calculated by 'TestHSigmoidOpWithSparseGrad'."
)
class TestHSigmoidOpSparse(OpTest):
    """Forward-only check of hierarchical_sigmoid with is_sparse=True and
    a custom tree (classes 1..6, samples 1,2,5,6)."""

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        num_classes = 6  # using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
        feature_size = 8
        batch_size = 4
        x = np.random.random((batch_size, feature_size))
        w = np.random.random((num_classes - 1, feature_size))
        label = np.array([0, 1, 4, 5]).astype('int64')
        # Non-leaf node ids on each sample's root->leaf path, padded with -1.
        path_table = np.array(
            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
             (0, 2, -1, -1, -1)]).astype('int64')
        # Branch bit taken at each step of the path, padded with -1.
        path_code = np.array(
            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
             (0, 1, -1, -1, -1)]).astype('int64')
        bias = np.random.random((num_classes - 1, 1))
        self.attrs = {'num_classes': num_classes, 'is_sparse': True}
        self.inputs = {
            'X': x,
            'W': w,
            'PathTable': path_table,
            'PathCode': path_code,
            'Label': label,
            'Bias': bias
        }
        pre_output, out = hsigmoidWithCustomTree(x, w, path_table, path_code,
                                                 label, bias, num_classes)
        self.outputs = {'PreOut': pre_output, 'Out': out}

    def test_check_output(self):
        self.check_output()


class TestHSigmoidOpWithSparseGrad(unittest.TestCase):
    """Trains a tiny embedding->hsigmoid network twice (dense vs. sparse
    gradients) and checks both runs produce identical losses."""

    def hs_net_conf(self, is_sparse):
        # Build the minimal network with a custom (user supplied) tree.
        input_word = fluid.layers.data(name="x", shape=[1], dtype='int64')
        path_table = fluid.layers.data(
            name='path_table', shape=[3], dtype='int64')
        path_code = fluid.layers.data(
            name='path_code', shape=[3], dtype='int64')
        label = fluid.layers.data(name='label', shape=[1], dtype='int64')

        data_list = [input_word, path_table, path_code, label]

        emb = fluid.layers.embedding(
            input=input_word,
            is_sparse=is_sparse,
            size=[3, 3],
            param_attr=fluid.ParamAttr(initializer=fluid.initializer.Normal(
                scale=1 / math.sqrt(3))))

        cost = fluid.layers.hsigmoid(
            input=emb,
            label=label,
            bias_attr=True,
            num_classes=3,
            path_table=path_table,
            path_code=path_code,
            is_custom=True,
            is_sparse=is_sparse)

        avg_cost = fluid.layers.reduce_mean(cost)

        return avg_cost, data_list

    def training_test(self, is_sparse):
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            # Fixed seed so the dense and sparse runs see identical weights.
            paddle.manual_seed(1)
            start_up = fluid.default_startup_program()
            x = np.arange(6).reshape(6)
            path_table = np.array([(1, 2, -1), (1, 2, -1)]).astype('int64')
            path_code = np.array([(1, 0, -1), (0, 0, -1)]).astype('int64')
            label = np.array([1, 4]).astype('int64')

            loss, data_list = self.hs_net_conf(is_sparse)
            optimizer = fluid.optimizer.SGD(learning_rate=1e-3)
            optimizer.minimize(loss)

            main_program = fluid.default_main_program()
            place = fluid.CPUPlace()
            feeder = fluid.DataFeeder(feed_list=data_list, place=place)
            exe = fluid.Executor(place)

            exe.run(start_up)
            losses = list()
            for step in range(10):
                # Alternate between the two samples.
                batch = [([[x[step % 2]]], [list(path_table[step % 2])],
                          [list(path_code[step % 2])], [label[step % 2]])]

                loss_val = exe.run(main_program,
                                   feed=feeder.feed(batch),
                                   fetch_list=[loss])
                losses.append(loss_val)
        return losses

    def test_hs_grad_with_sparse(self):
        dense_result = self.training_test(is_sparse=False)
        sparse_result = self.training_test(is_sparse=True)
        assert (dense_result == sparse_result)


@skip_check_grad_ci(
    reason="[skip shape check] The huffman tree is structed separately. It will be complicated if use large shape."
)
class TestHSigmoidOpWithCostumTree(OpTest):
    """hierarchical_sigmoid with a custom tree: checks the forward result
    and the gradients w.r.t. X, W and Bias."""

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        num_classes = 6  # using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
        feature_size = 8
        batch_size = 4
        x = np.random.uniform(-1, 1, (batch_size, feature_size))
        w = np.random.uniform(-1, 1, (num_classes - 1, feature_size))
        label = np.array([0, 1, 4, 5]).astype('int64')
        # Non-leaf node ids on each sample's root->leaf path, padded with -1.
        path_table = np.array(
            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
             (0, 2, -1, -1, -1)]).astype('int64')
        # Branch bit taken at each step of the path, padded with -1.
        path_code = np.array(
            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
             (0, 1, -1, -1, -1)]).astype('int64')
        bias = np.random.random((num_classes - 1, 1))
        self.attrs = {'num_classes': num_classes, 'is_sparse': False}
        self.inputs = {
            'X': x,
            'W': w,
            'PathTable': path_table,
            'PathCode': path_code,
            'Label': label,
            'Bias': bias
        }
        pre_output, out = hsigmoidWithCustomTree(x, w, path_table, path_code,
                                                 label, bias, num_classes)
        self.outputs = {'PreOut': pre_output, 'Out': out}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))


@skip_check_grad_ci(
    reason="[skip shape check] The huffman tree is structed separately. It will be complicated if use large shape."
)
class TestHSigmoidOpWithCostumTreeWithoutBias(OpTest):
    """Same as TestHSigmoidOpWithCostumTree, but without a Bias input."""

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        num_classes = 6  # using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
        feature_size = 8
        batch_size = 4
        x = np.random.uniform(-1, 1, (batch_size, feature_size))
        w = np.random.uniform(-1, 1, (num_classes - 1, feature_size))
        label = np.array([0, 1, 4, 5]).astype('int64')
        # Non-leaf node ids on each sample's root->leaf path, padded with -1.
        path_table = np.array(
            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
             (0, 2, -1, -1, -1)]).astype('int64')
        # Branch bit taken at each step of the path, padded with -1.
        path_code = np.array(
            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
             (0, 1, -1, -1, -1)]).astype('int64')
        self.attrs = {'num_classes': num_classes, 'is_sparse': False}
        self.inputs = {
            'X': x,
            'W': w,
            'PathTable': path_table,
            'PathCode': path_code,
            'Label': label,
        }
        pre_output, out = hsigmoidWithCustomTree(
            x=x,
            w=w,
            path_table=path_table,
            path_code=path_code,
            label=label,
            bias=None,
            num_classes=num_classes)
        self.outputs = {'PreOut': pre_output, 'Out': out}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X', 'W'], ['Out'], no_grad_set=set('Label'))


class TestHSigmoidOpError(unittest.TestCase):
    """Input-validation checks for fluid.layers.hsigmoid."""

    def test_errors(self):
        with program_guard(Program()):
            label = fluid.data('label', [4, 1], 'int64')
            # The input type must be Variable.
            self.assertRaises(TypeError, fluid.layers.hsigmoid, 1, label, 2)
            # The input dtype must be float16, float32 or float64.
            x_int32 = fluid.data(name='x_int32', shape=[4, 3], dtype='int32')
            self.assertRaises(TypeError, fluid.layers.hsigmoid, x_int32, label,
                              2)
            # A float32 input is accepted.
            x_fp32 = fluid.data(name='x_fp32', shape=[4, 3], dtype='float32')
            fluid.layers.hsigmoid(x_fp32, label, 2)

            # The label type must be Variable.
            self.assertRaises(TypeError, fluid.layers.hsigmoid, x_fp32, 1, 2)
            # The label dtype must be int64.
            label_int32 = fluid.data('label_int32', [4, 1], 'int32')
            self.assertRaises(TypeError, fluid.layers.hsigmoid, x_fp32,
                              label_int32, 2)


# Allow running this test file directly.
if __name__ == '__main__':
    unittest.main()