diff --git a/python/paddle/fluid/tests/unittests/test_lookup_sparse_table_fuse_ops.py b/python/paddle/fluid/tests/unittests/test_lookup_sparse_table_fuse_ops.py
index 0407ea53382af56c08c25ecddc161e6f260fb423..b2bf6e546e724fbe61e78ce792c951a4f8aeb40a 100644
--- a/python/paddle/fluid/tests/unittests/test_lookup_sparse_table_fuse_ops.py
+++ b/python/paddle/fluid/tests/unittests/test_lookup_sparse_table_fuse_ops.py
@@ -30,13 +30,30 @@ class TestLookupTableFuseOp(unittest.TestCase):
     def check_with_place(self, place):
         scope = fluid.global_scope()
 
-        init_program = fluid.Program()
+        scope.var("LearningRate").get_tensor().set([0.01], place)
+        scope.var("Ids").get_tensor().set([i for i in range(100)], place)
 
-        lr = scope.var("LearningRate")
-        lr.get_tensor().set([0.01], place)
+        init_program = fluid.Program()
 
-        ids = [i for i in range(100)]
-        out = scope.var("output")
+        lr = init_program.global_block().create_var(
+            name="LearningRate",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[1],
+            dtype="float32")
+
+        ids = init_program.global_block().create_var(
+            name="Ids",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[100],
+            dtype="int64")
+
+        output = init_program.global_block().create_var(
+            name="output",
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[100, 8],
+            dtype="float32")
 
         metas = []
         metas.append(
@@ -55,32 +72,34 @@ class TestLookupTableFuseOp(unittest.TestCase):
         init_program.global_block().append_op(
             type="lookup_sparse_table_read",
             inputs={"Ids": ids},
-            outputs={"Out": out},
+            outputs={"Out": output},
            attrs={
                 "tablename": "embedding_1.block0",
                 "init": True,
-                "value_names": ["Param", "Moment1", "Moment2"],
+                "value_names": ["Param"],
             })
 
         init_program.global_block().append_op(
             type="lookup_sparse_table_read",
             inputs={"Ids": ids},
-            outputs={"Out": out},
+            outputs={"Out": output},
             attrs={
                 "tablename": "embedding_2.block0",
                 "init": True,
                 "value_names": ["Param"],
             })
 
-        executor = fluid.Executor(fluid.CPUPlace())
+        executor = fluid.Executor(place)
         executor.run(init_program)
 
         training_program = fluid.Program()
 
-        rows = [0, 1, 2, 3, 4, 5, 6]
-        row_numel = 7
+        scope.var('Beta1Pow').get_tensor().set([0], place)
+        scope.var('Beta2Pow').get_tensor().set([0], place)
 
-        w_selected_rows = scope.var('W').get_selected_rows()
+        rows = [0, 1, 2, 3, 4, 5, 6]
+        row_numel = 8
+        w_selected_rows = scope.var('Grad').get_selected_rows()
         w_selected_rows.set_height(len(rows))
         w_selected_rows.set_rows(rows)
         w_array = np.ones((len(rows), row_numel)).astype("float32")
@@ -89,11 +108,44 @@ class TestLookupTableFuseOp(unittest.TestCase):
         w_tensor = w_selected_rows.get_tensor()
         w_tensor.set(w_array, place)
 
+        lr = training_program.global_block().create_var(
+            name="LearningRate",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[1],
+            dtype="float32")
+
+        grads = training_program.global_block().create_var(
+            name="Grad",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.SELECTED_ROWS,
+            shape=[100, 8],
+            dtype="float32")
+
+        beta1 = training_program.global_block().create_var(
+            name="Beta1Pow",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[1],
+            dtype="float32")
+
+        beta2 = training_program.global_block().create_var(
+            name="Beta2Pow",
+            persistable=True,
+            type=fluid.core.VarDesc.VarType.LOD_TENSOR,
+            shape=[1],
+            dtype="float32")
+
         training_program.global_block().append_op(
             type="lookup_sparse_table_fuse_adam",
-            inputs={"Grad": ids,
-                    "LearningRate": lr},
-            outputs={"Out": out},
+            inputs={
+                "Grad": grads,
+                "LearningRate": lr,
+                "Beta1Pow": beta1,
+                "Beta2Pow": beta2,
+            },
+            outputs={"Beta1PowOut": beta1,
+                     "Beta2PowOut": beta2},
             attrs={
                 "is_entry": False,
                 "tablename": "embedding_1.block0",
@@ -102,15 +154,16 @@ class TestLookupTableFuseOp(unittest.TestCase):
 
         training_program.global_block().append_op(
             type="lookup_sparse_table_fuse_sgd",
-            inputs={"Grad": ids,
+            inputs={"Grad": grads,
                     "LearningRate": lr},
-            outputs={"Out": out},
             attrs={
                 "is_entry": False,
                 "tablename": "embedding_2.block0",
                 "value_names": ["Param"],
             })
 
+        executor.run(training_program)
+
 
 if __name__ == "__main__":
     unittest.main()