From eba7177475815f6ecb231101c558d693e972f6fb Mon Sep 17 00:00:00 2001
From: tangwei12
Date: Fri, 17 Aug 2018 11:03:37 +0800
Subject: [PATCH] add unit test and code fix

---
 paddle/fluid/API.spec                              |  2 +-
 python/paddle/fluid/io.py                          | 11 ++---
 .../tests/unittests/test_dist_transpiler.py        | 49 +++++++++++++++++++
 3 files changed, 55 insertions(+), 7 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index 964be52ee9..075634a306 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -77,8 +77,8 @@ paddle.fluid.io.save_persistables ArgSpec(args=['executor', 'dirname', 'main_pro
 paddle.fluid.io.load_vars ArgSpec(args=['executor', 'dirname', 'main_program', 'vars', 'predicate', 'filename'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.io.load_params ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.load_persistables ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
-paddle.fluid.io.load_inference_model ArgSpec(args=['dirname', 'executor', 'model_filename', 'params_filename', 'pserver_endpoints'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.save_inference_model ArgSpec(args=['dirname', 'feeded_var_names', 'target_vars', 'executor', 'main_program', 'model_filename', 'params_filename', 'export_for_deployment'], varargs=None, keywords=None, defaults=(None, None, None, True))
+paddle.fluid.io.load_inference_model ArgSpec(args=['dirname', 'executor', 'model_filename', 'params_filename', 'pserver_endpoints'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.get_inference_program ArgSpec(args=['target_vars', 'main_program'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.initializer.ConstantInitializer.__init__ ArgSpec(args=['self', 'value', 'force_cpu'], varargs=None, keywords=None, defaults=(0.0, False))
 paddle.fluid.initializer.UniformInitializer.__init__ ArgSpec(args=['self', 'low', 'high', 'seed'], varargs=None, keywords=None, defaults=(-1.0, 1.0, 0))
diff --git a/python/paddle/fluid/io.py b/python/paddle/fluid/io.py
index f4a1fa78b8..049c0e40e6 100644
--- a/python/paddle/fluid/io.py
+++ b/python/paddle/fluid/io.py
@@ -672,12 +672,11 @@ def save_inference_model(dirname,
     save_persistables(executor, dirname, inference_program, params_filename)
 
     # if there is lookup table, the trainer 0 will notify all pserver to save.
-    if main_program._is_distributed and main_program._is_chief:
-        if main_program._distributed_lookup_table:
-            lookup_table_filename = os.path.join(dirname, "__lookup_table__")
-            _save_lookup_tables_by_notify(
-                executor, lookup_table_filename,
-                main_program._distributed_lookup_table, main_program._endpoints)
+    if main_program._is_distributed and main_program._is_chief and main_program._distributed_lookup_table:
+        lookup_table_filename = os.path.join(dirname, "__lookup_table__")
+        _save_lookup_tables_by_notify(executor, lookup_table_filename,
+                                      main_program._distributed_lookup_table,
+                                      main_program._endpoints)
 
 
 def load_inference_model(dirname,
diff --git a/python/paddle/fluid/tests/unittests/test_dist_transpiler.py b/python/paddle/fluid/tests/unittests/test_dist_transpiler.py
index 0543e62381..b88fd2707d 100644
--- a/python/paddle/fluid/tests/unittests/test_dist_transpiler.py
+++ b/python/paddle/fluid/tests/unittests/test_dist_transpiler.py
@@ -536,6 +536,19 @@ class TestAsyncDistLookupTable(TestDistLookupTableBase):
         self.assertEqual([op.type for op in trainer.blocks[0].ops], ops)
 
 
+class TestDistArgsInProgram(TestDistLookupTableBase):
+    def net_conf(self):
+        self.network_with_table(is_sparse=True, is_distributed=True)
+
+    def transpiler_test_impl(self):
+        config = fluid.DistributeTranspilerConfig()
+        pserver1, _ = self.get_pserver(self.pserver1_ep, config, False)
+
+        self.assertTrue(pserver1._is_chief)
+        self.assertTrue(pserver1._is_distributed)
+        self.assertTrue(pserver1._distributed_lookup_table)
+
+
 class TestRMSPropOptimizer(TranspilerTest):
     def net_conf(self):
         x = fluid.layers.data(name='x', shape=[1000], dtype='float32')
@@ -566,5 +579,41 @@ class TestRMSPropOptimizer(TranspilerTest):
         self.assertEqual(moment_var.shape, (500, 1000))
 
 
+class TestLoadSliceVar(TranspilerTest):
+    def net_conf(self):
+        x = fluid.layers.data(name='x', shape=[1000], dtype='float32')
+        y_predict = fluid.layers.fc(input=x,
+                                    size=1000,
+                                    act=None,
+                                    param_attr=fluid.ParamAttr(name='fc_w'),
+                                    bias_attr=fluid.ParamAttr(name='fc_b'))
+        y = fluid.layers.data(name='y', shape=[1], dtype='float32')
+        cost = fluid.layers.square_error_cost(input=y_predict, label=y)
+        avg_cost = fluid.layers.mean(cost)
+        optimizer = fluid.optimizer.RMSProp(learning_rate=0.1)
+        optimizer.minimize(avg_cost)
+        return
+
+    def transpiler_test_impl(self):
+        pserver, _ = self.get_pserver(self.pserver1_ep)
+        pserver2, _ = self.get_pserver(self.pserver2_ep)
+
+        self.assertTrue(pserver._slice_vars_and_atts)
+        self.assertTrue(pserver2._slice_vars_and_atts)
+
+        for idx in xrange(len(pserver._slice_vars_and_atts)):
+            self.assertEqual(pserver._slice_vars_and_atts[idx][0],
+                             pserver2._slice_vars_and_atts[idx][0])
+
+            total_numel = reduce(lambda x, y: x * y,
+                                 pserver._slice_vars_and_atts[idx][0].shape)
+            self.assertEqual(
+                total_numel,
+                reduce(lambda x, y: x * y,
+                       pserver._slice_vars_and_atts[idx][2].shape) + reduce(
+                           lambda x, y: x * y,
+                           pserver2._slice_vars_and_atts[idx][2].shape))
+
+
 if __name__ == "__main__":
     unittest.main()
-- 
GitLab
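
Reviewer note, not part of the patch: a minimal usage sketch of the save/load path the io.py hunk touches. The toy network, directory name, and endpoint addresses are placeholders; in a real distributed job `main_program` would come from DistributeTranspiler, which is what sets `_is_distributed`, `_is_chief`, and `_distributed_lookup_table` on the program.

import paddle.fluid as fluid

# Placeholder network; names and shapes are illustrative only.
x = fluid.layers.data(name='x', shape=[13], dtype='float32')
y_predict = fluid.layers.fc(input=x, size=1, act=None)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

# On the chief trainer of a distributed job that uses a distributed lookup
# table, this call also notifies every pserver to dump the table into
# "<dirname>/__lookup_table__" (the branch rewritten in io.py above).
fluid.io.save_inference_model(
    dirname="./infer_model",
    feeded_var_names=['x'],
    target_vars=[y_predict],
    executor=exe)

# load_inference_model now lists a pserver_endpoints argument (see the
# API.spec change); it is only relevant for distributed inference against
# the pservers that hold the lookup table.
inference_program, feed_names, fetch_targets = fluid.io.load_inference_model(
    dirname="./infer_model",
    executor=exe,
    model_filename=None,
    params_filename=None,
    pserver_endpoints=["127.0.0.1:6174", "127.0.0.1:6175"])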