diff --git a/python/paddle/fluid/tests/unittests/launch_function_helper.py b/python/paddle/fluid/tests/unittests/launch_function_helper.py
index 13041827ffeabd3d6b79e4f34a67bd09624e54f6..046268444018799ca4d7f5530cbb6b1c707e062f 100644
--- a/python/paddle/fluid/tests/unittests/launch_function_helper.py
+++ b/python/paddle/fluid/tests/unittests/launch_function_helper.py
@@ -15,7 +15,8 @@ from multiprocessing import Pool, Process
 import os
 import socket
 from contextlib import closing
-import psutil
+import time
+import sys
 
 
 def launch_func(func, env_dict):
@@ -25,19 +26,36 @@ def launch_func(func, env_dict):
     return proc
 
 
-def wait(procs, timeout=None):
-    # wait
-    decents = []
+def wait(procs, timeout=30):
+    error = False
+    begin = time.time()
+    while True:
+        alive = False
+        for p in procs:
+            p.join(timeout=10)
+            if p.exitcode is None:
+                alive = True
+                continue
+            elif p.exitcode != 0:
+                error = True
+                break
+
+        if not alive:
+            break
+
+        if error:
+            break
+
+        if timeout is not None and time.time() - begin >= timeout:
+            error = True
+            break
+
     for p in procs:
-        for child in psutil.Process(p.pid).children(recursive=True):
-            decents.append(child)
-
-    gone, alive = psutil.wait_procs(decents, timeout=timeout)
-    for p in alive:
-        p.kill()
-    for p in gone:
-        if p.returncode != 0:
-            sys.exit(1)
+        if p.is_alive():
+            p.terminate()
+
+    if error:
+        sys.exit(1)
 
 
 def _find_free_port(port_set):
diff --git a/python/paddle/fluid/tests/unittests/test_fleet_graph_execution_meta_optimizer.py b/python/paddle/fluid/tests/unittests/test_fleet_graph_execution_meta_optimizer.py
index 9eec73116cc283b58d3ee39cefb9256e12d4ef15..927c155ff1116a821a13730a9d2a779a7c68b254 100644
--- a/python/paddle/fluid/tests/unittests/test_fleet_graph_execution_meta_optimizer.py
+++ b/python/paddle/fluid/tests/unittests/test_fleet_graph_execution_meta_optimizer.py
@@ -190,7 +190,7 @@ class TestFleetGraphExecutionMetaOptimizer(unittest.TestCase):
             avg_cost = paddle.fluid.layers.mean(x=cost)
 
             strategy = paddle.distributed.fleet.DistributedStrategy()
-            optimizer = paddle.optimizer.SGD(learning_rate=0.01)
+            optimizer = paddle.fluid.optimizer.SGD(learning_rate=0.01)
             optimizer = fleet.distributed_optimizer(
                 optimizer, strategy=strategy)
             optimizer.minimize(avg_cost)
diff --git a/python/requirements.txt b/python/requirements.txt
index e278a1b824cc3829f1b67bc3a0cf643840990bb9..c8d3b2af1794bb0858b187d6a4c641322f50cdd1 100644
--- a/python/requirements.txt
+++ b/python/requirements.txt
@@ -21,4 +21,3 @@ objgraph
 astor
 pathlib
 netifaces
-psutil
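
For reference, a minimal, self-contained sketch of the contract the reworked `wait()` provides, using plain `multiprocessing.Process` objects (the hunk above relies only on `join()`, `exitcode`, `is_alive()` and `terminate()`). The import path and the two stand-in workers are hypothetical, for illustration only:

```python
# Sketch only: assumes launch_function_helper.py is importable from the
# current directory; _ok and _fail are hypothetical stand-in workers.
import sys
import time
from multiprocessing import Process

from launch_function_helper import wait


def _ok():
    # Healthy child: finishes quickly with exitcode 0.
    time.sleep(1)


def _fail():
    # Failing child: any non-zero exitcode makes wait() call sys.exit(1).
    sys.exit(3)


if __name__ == '__main__':
    procs = [Process(target=_ok), Process(target=_fail)]
    for p in procs:
        p.start()
    # Polls the children via join()/exitcode, terminates any process still
    # alive once the loop ends, and exits with status 1 because _fail() errored.
    wait(procs, timeout=30)
```

Unlike the psutil version, this approach only tracks the processes it was handed rather than walking their recursive children, which is what allows psutil to be dropped from requirements.txt.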