From 7a64d48f55c53a0e278b5c622753504a4662814f Mon Sep 17 00:00:00 2001
From: tangwei12
Date: Thu, 15 Nov 2018 12:36:03 +0800
Subject: [PATCH] fix test_save_load with pickle (#14410)

* fix test_save_load with pickle
test=develop

* fix test_save_load with pickle
test=develop

* fix test_save_load with pickle
test=develop
---
 python/paddle/fluid/tests/unittests/dist_save_load.py   | 6 +++++-
 .../paddle/fluid/tests/unittests/test_dist_save_load.py | 8 ++++----
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/dist_save_load.py b/python/paddle/fluid/tests/unittests/dist_save_load.py
index edc6055005..cf62817956 100644
--- a/python/paddle/fluid/tests/unittests/dist_save_load.py
+++ b/python/paddle/fluid/tests/unittests/dist_save_load.py
@@ -26,6 +26,7 @@ from multiprocessing import Process
 from functools import reduce
 
 import numpy as np
+import pickle
 import unittest
 import six
 
@@ -166,7 +167,10 @@ class TestDistSaveLoad2x2(TestDistSimnetBow2x2):
                     io.save_persistables(startup_exe, model_dir, trainer_prog)
 
         var = np.array(fluid.global_scope().find_var('__fc_b__').get_tensor())
-        print(np.ravel(var).tolist())
+        if six.PY2:
+            print(pickle.dumps(np.ravel(var).tolist()))
+        else:
+            sys.stdout.buffer.write(pickle.dumps(np.ravel(var).tolist()))
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/test_dist_save_load.py b/python/paddle/fluid/tests/unittests/test_dist_save_load.py
index 03066fee48..ea2b554dac 100644
--- a/python/paddle/fluid/tests/unittests/test_dist_save_load.py
+++ b/python/paddle/fluid/tests/unittests/test_dist_save_load.py
@@ -65,14 +65,14 @@ class TestDistSaveLoadDense2x2(TestDistBase):
 
         shutil.rmtree(model_dir)
 
-        local_np = np.array(eval(local_var[0]))
-        train0_np = np.array(eval(tr0_var[0]))
-        train1_np = np.array(eval(tr1_var[0]))
+        local_np = np.array(local_var)
+        train0_np = np.array(tr0_var)
+        train1_np = np.array(tr1_var)
+
         self.assertAlmostEqual(local_np.all(), train0_np.all(), delta=delta)
         self.assertAlmostEqual(local_np.all(), train1_np.all(), delta=delta)
         self.assertAlmostEqual(train0_np.all(), train1_np.all(), delta=delta)
 
-    @unittest.skip(reason="CI fail")
     def test_dist(self):
         need_envs = {
             "IS_DISTRIBUTED": '0',
-- 
GitLab
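
Note (not part of the patch): the change replaces printing a Python list (later parsed with eval) by writing pickle bytes to stdout, which works identically under Python 2 and 3 and avoids eval on untrusted text. Below is a minimal, hedged sketch of the consumer side this pattern relies on; the helper name capture_trainer_var and the command used are hypothetical illustrations, not PaddlePaddle APIs (the real harness lives in test_dist_base.py).

import pickle
import subprocess
import sys

import numpy as np


def capture_trainer_var(cmd):
    # Run the trainer process and capture its raw stdout bytes.
    out = subprocess.check_output(cmd)
    # pickle.loads stops at the pickle STOP opcode, so a trailing newline
    # (as produced by print() on Python 2) is tolerated; no eval() needed.
    return np.array(pickle.loads(out))


if __name__ == "__main__":
    # Hypothetical invocation for illustration only.
    var = capture_trainer_var([sys.executable, "dist_save_load.py"])
    print(var.shape)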