From 504db4f5ac34b48e783d26d0ff6c093e12adeb33 Mon Sep 17 00:00:00 2001
From: Ghost Screaming
Date: Mon, 16 Jan 2023 15:30:06 +0800
Subject: [PATCH] Fix paddle save for multi-processing (#49657)

* Fix bug of reduce_sum op. When input.numel() > INT32_MAX, its result is
  wrong.
* Remove climits.
* Fix bug of paddle.save. It may cause a bug when saving a sharded optimizer
  state_dict() in parallel.
---
 python/paddle/framework/io.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/paddle/framework/io.py b/python/paddle/framework/io.py
index 5b843792640..432d1088b10 100644
--- a/python/paddle/framework/io.py
+++ b/python/paddle/framework/io.py
@@ -777,7 +777,7 @@ def save(obj, path, protocol=4, **configs):
     # 2. save object
     dirname = os.path.dirname(path)
     if dirname and not os.path.exists(dirname):
-        os.makedirs(dirname)
+        os.makedirs(dirname, exist_ok=True)
     elif not _is_memory_buffer(path):
         raise ValueError(
             "only supports saving objects to file and `BytesIO`, but got {}".format(
@@ -853,7 +853,7 @@ def _legacy_save(obj, path, protocol=2):
     # 2. save object
     dirname = os.path.dirname(path)
     if dirname and not os.path.exists(dirname):
-        os.makedirs(dirname)
+        os.makedirs(dirname, exist_ok=True)

     if isinstance(obj, dict):
         saved_obj = _build_saved_state_dict(obj)
--
GitLab
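
The patch addresses a check-then-create race: when several processes save sharded optimizer state under the same directory, every process can pass the os.path.exists(dirname) check before any of them has created the directory, and the plain os.makedirs(dirname) call then raises FileExistsError in all but the winning process. Passing exist_ok=True makes the call idempotent. The sketch below reproduces that scenario under stated assumptions; save_shard, the "checkpoint" directory, and the dummy payload are hypothetical names used only for illustration, not part of the Paddle API.

import os
from multiprocessing import Process


def save_shard(shard_id, base_dir="checkpoint"):
    # Hypothetical stand-in for one rank writing its optimizer shard.
    path = os.path.join(base_dir, f"opt_shard_{shard_id}.pdopt")
    dirname = os.path.dirname(path)
    if dirname and not os.path.exists(dirname):
        # Without exist_ok=True, two workers that both pass the
        # exists() check would race here and one would crash with
        # FileExistsError; exist_ok=True makes the call safe.
        os.makedirs(dirname, exist_ok=True)
    with open(path, "wb") as f:
        f.write(b"fake shard payload")


if __name__ == "__main__":
    # Launch several workers that all try to create the same directory.
    procs = [Process(target=save_shard, args=(i,)) for i in range(8)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()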