diff --git a/paddle/fluid/operators/math/matrix_bit_code.h b/paddle/fluid/operators/math/matrix_bit_code.h
index 9d8d0de53a72332f25fb7f949cd2bb9cb3055fc5..7f507999fda0eb576d6d1da69da6c2e4d8a7459a 100644
--- a/paddle/fluid/operators/math/matrix_bit_code.h
+++ b/paddle/fluid/operators/math/matrix_bit_code.h
@@ -98,7 +98,7 @@ inline int clz(const T& value) {
   }
 }
 
-inline size_t FindLastSet(size_t x) { return sizeof(size_t) * 8 - clz(x); }
+inline size_t FindLastSet(size_t x) { return 1 + sizeof(size_t) * 8 - clz(x); }
 #endif  // !_WIN32
 class SimpleCode {
  public:
diff --git a/python/paddle/fluid/tests/unittests/CMakeLists.txt b/python/paddle/fluid/tests/unittests/CMakeLists.txt
index d26f2de0340d91a0a88097a76b3dbd3c419831c9..366951aa7129790ad162eca814ee7a6832d557fd 100644
--- a/python/paddle/fluid/tests/unittests/CMakeLists.txt
+++ b/python/paddle/fluid/tests/unittests/CMakeLists.txt
@@ -58,7 +58,6 @@ if(WIN32)
     LIST(REMOVE_ITEM TEST_OPS test_debugger)
     list(REMOVE_ITEM TEST_OPS test_desc_clone)
     list(REMOVE_ITEM TEST_OPS test_fake_init_op)
-    list(REMOVE_ITEM TEST_OPS test_hsigmoid_op)
     list(REMOVE_ITEM TEST_OPS test_merge_ids_op)
     list(REMOVE_ITEM TEST_OPS test_split_ids_op)
     list(REMOVE_ITEM TEST_OPS test_program_code)
diff --git a/python/paddle/fluid/tests/unittests/test_hsigmoid_op.py b/python/paddle/fluid/tests/unittests/test_hsigmoid_op.py
index 421a6c695364dc467d9faa7355c4ba51e8d61d7b..5777bb3c6f5e34f035c32ed963906b5ccc03ba85 100644
--- a/python/paddle/fluid/tests/unittests/test_hsigmoid_op.py
+++ b/python/paddle/fluid/tests/unittests/test_hsigmoid_op.py
@@ -70,9 +70,9 @@ def hsigmoid(x, w, label, bias, num_classes):
     batch_size = x.shape[0]
     code_length = find_latest_set(num_classes - 1)
     code_table = [0 for _ in range(code_length)]
-    pre_output = np.zeros((batch_size, code_length))
-    pre_sum = np.zeros((batch_size, 1))
-    out = np.zeros((batch_size, 1))
+    pre_output = np.zeros((batch_size, code_length)).astype('float64')
+    pre_sum = np.zeros((batch_size, 1)).astype('float64')
+    out = np.zeros((batch_size, 1)).astype('float64')
     for i in range(batch_size):
         code_table = CodeTable(num_classes, label[i])
         length = code_table.get_length()
@@ -105,9 +105,9 @@ def hsigmoid(x, w, label, bias, num_classes):
 
 def hsigmoid_grad(x, w, label, bias, num_classes):
     batch_size = x.shape[0]
-    dx = np.zeros(x.shape)
-    dw = np.zeros(w.shape)
-    db = np.zeros(bias.shape)
+    dx = np.zeros(x.shape).astype('float64')
+    dw = np.zeros(w.shape).astype('float64')
+    db = np.zeros(bias.shape).astype('float64')
     for i in range(batch_size):
         code_table = CodeTable(num_classes, label[i])
         length = code_table.get_length()
@@ -133,9 +133,9 @@ def hsigmoidWithCustomTree(x, w, path_table, path_code, label, bias,
     code_length = len(path_table[0])
     code_table = [0 for _ in range(code_length)]
     # init pre_out with shape [N, code_length]
-    pre_output = np.zeros((batch_size, code_length))
-    pre_sum = np.zeros((batch_size, 1))
-    out = np.zeros((batch_size, 1))
+    pre_output = np.zeros((batch_size, code_length)).astype('float64')
+    pre_sum = np.zeros((batch_size, 1)).astype('float64')
+    out = np.zeros((batch_size, 1)).astype('float64')
     if isinstance(bias, np.ndarray):
         for i in range(batch_size):
             code_table = CodeTableWithCustomTree(path_table, path_code, i)
@@ -173,10 +173,13 @@ class TestHSigmoidOp(OpTest):
         num_classes = 101
         feature_size = 5
         batch_size = 20
-        x = np.random.uniform(-1, 1, (batch_size, feature_size))
-        w = np.random.uniform(-1, 1, (num_classes - 1, feature_size))
-        label = np.random.randint(0, num_classes, (batch_size, 1))
-        bias = np.random.uniform(-1, 1, (num_classes - 1, 1))
+        x = np.random.uniform(-1, 1,
+                              (batch_size, feature_size)).astype('float64')
+        w = np.random.uniform(-1, 1,
+                              (num_classes - 1, feature_size)).astype('float64')
+        label = np.random.randint(0, num_classes,
+                                  (batch_size, 1)).astype('int64')
+        bias = np.random.uniform(-1, 1, (num_classes - 1, 1)).astype('float64')
         self.attrs = {'num_classes': num_classes, 'is_sparse': False}
         self.inputs = {'X': x, 'W': w, 'Label': label, 'Bias': bias}
         pre_output, out = hsigmoid(x, w, label, bias, num_classes)
@@ -189,7 +192,6 @@ class TestHSigmoidOp(OpTest):
     def test_check_grad(self):
         self.check_grad(
             ['X', 'W', 'Bias'], ['Out'], user_defined_grads=self.user_grads)
-        #self.check_grad(['X', 'W', 'Bias'], ['Out'])
 
 
 @skip_check_grad_ci(
@@ -203,13 +205,15 @@ class TestHSigmoidOpSparse(OpTest):
         batch_size = 4
         x = np.random.random((batch_size, feature_size))
         w = np.random.random((num_classes - 1, feature_size))
-        label = np.array([0, 1, 4, 5])
-        path_table = np.array(
-            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
-             (0, 2, -1, -1,
-              -1)])  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
-        path_code = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (
-            1, 0, 0, -1, -1), (0, 1, -1, -1, -1)])  #np.array to store
+        label = np.array([0, 1, 4, 5]).astype('int64')
+        path_table = np.array([
+            (0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1), (0, 2, -1,
+                                                                       -1, -1)
+        ]).astype(
+            'int64')  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
+        path_code = np.array(
+            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
+             (0, 1, -1, -1, -1)]).astype('int64')  #np.array to store
         bias = np.random.random((num_classes - 1, 1))
         self.attrs = {'num_classes': num_classes, 'is_sparse': True}
         self.inputs = {
@@ -265,9 +269,9 @@ class TestHSigmoidOpWithSparseGrad(unittest.TestCase):
         start_up = fluid.default_startup_program()
         start_up.random_seed = 1  # Fix random seed
         x = np.arange(6).reshape(6)
-        path_table = np.array([(1, 2, -1), (1, 2, -1)])
-        path_code = np.array([(1, 0, -1), (0, 0, -1)])
-        label = np.array([1, 4])
+        path_table = np.array([(1, 2, -1), (1, 2, -1)]).astype('int64')
+        path_code = np.array([(1, 0, -1), (0, 0, -1)]).astype('int64')
+        label = np.array([1, 4]).astype('int64')
 
         loss, data_list = self.hs_net_conf(is_sparse)
         optimizer = fluid.optimizer.SGD(learning_rate=1e-3)
@@ -307,13 +311,15 @@ class TestHSigmoidOpWithCostumTree(OpTest):
         batch_size = 4
         x = np.random.uniform(-1, 1, (batch_size, feature_size))
         w = np.random.uniform(-1, 1, (num_classes - 1, feature_size))
-        label = np.array([0, 1, 4, 5])
-        path_table = np.array(
-            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
-             (0, 2, -1, -1,
-              -1)])  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
-        path_code = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (
-            1, 0, 0, -1, -1), (0, 1, -1, -1, -1)])  #np.array to store
+        label = np.array([0, 1, 4, 5]).astype('int64')
+        path_table = np.array([
+            (0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1), (0, 2, -1,
+                                                                       -1, -1)
+        ]).astype(
+            'int64')  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
+        path_code = np.array(
+            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
+             (0, 1, -1, -1, -1)]).astype('int64')  #np.array to store
         bias = np.random.random((num_classes - 1, 1))
         self.attrs = {'num_classes': num_classes, 'is_sparse': False}
         self.inputs = {
@@ -346,13 +352,15 @@ class TestHSigmoidOpWithCostumTreeWithoutBias(OpTest):
         batch_size = 4
         x = np.random.uniform(-1, 1, (batch_size, feature_size))
         w = np.random.uniform(-1, 1, (num_classes - 1, feature_size))
-        label = np.array([0, 1, 4, 5])
-        path_table = np.array(
-            [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
-             (0, 2, -1, -1,
-              -1)])  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
-        path_code = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (
-            1, 0, 0, -1, -1), (0, 1, -1, -1, -1)])  #np.array to store
+        label = np.array([0, 1, 4, 5]).astype('int64')
+        path_table = np.array([
+            (0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1), (0, 2, -1,
+                                                                       -1, -1)
+        ]).astype(
+            'int64')  #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
+        path_code = np.array(
+            [(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (1, 0, 0, -1, -1),
+             (0, 1, -1, -1, -1)]).astype('int64')  #np.array to store
         # bias = np.random.random((num_classes - 1, 1)).astype("float32")
         self.attrs = {'num_classes': num_classes, 'is_sparse': False}
         self.inputs = {
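
Note on the matrix_bit_code.h hunk: FindLastSet(x) is meant to return the 1-based position of the highest set bit of x; the hierarchical-sigmoid code length is FindLastSet(num_classes - 1). With a conventional count-leading-zeros, sizeof(size_t) * 8 - clz(x) already yields that position, so the added "1 +" presumably compensates for the Windows clz helper defined just above the changed line returning one more than the conventional count. Below is a minimal Python sketch of the intended semantics, cross-checked against the find_latest_set reference that test_hsigmoid_op.py uses; clz64 here stands in for a conventional 64-bit clz and is not part of the patch.

import math

def clz64(x):
    # Conventional count-leading-zeros for a 64-bit value, assuming x > 0.
    return 64 - x.bit_length()

def find_last_set(x):
    # The pre-patch C++ formula with a conventional clz: 1-based index
    # of the highest set bit, i.e. the hsigmoid code length.
    return 64 - clz64(x)  # sizeof(size_t) * 8 == 64 on a 64-bit build

def find_latest_set(num):
    # Reference used by the unit tests: floor(log2(num)) + 1.
    return 1 + int(math.floor(math.log(num, 2)))

for num_classes in (2, 6, 101, 1024):
    assert find_last_set(num_classes - 1) == find_latest_set(num_classes - 1)

The test-side .astype('float64') / .astype('int64') changes pin the NumPy inputs to fixed dtypes rather than platform defaults; this matters on Windows, where NumPy's default integer type is 32-bit, so an unannotated np.array([0, 1, 4, 5]) or np.random.randint(...) would otherwise feed int32 labels to an op expecting int64. Together with the clz fix, this appears to be what allows the CMakeLists.txt hunk to re-enable test_hsigmoid_op on Windows.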