diff --git a/paddle/phi/core/flags.cc b/paddle/phi/core/flags.cc
index 526457499c8844ddca40587eaba2ffa7d3202896..680661c890519a076af746bf34084ffe4d655a7c 100644
--- a/paddle/phi/core/flags.cc
+++ b/paddle/phi/core/flags.cc
@@ -1198,11 +1198,11 @@ PADDLE_DEFINE_EXPORTED_bool(trt_ibuilder_cache,
  * mmap_allocator related FLAG
  * Name: use_shm_cache
  * Since Version: 2.5.0
- * Value Range: bool, default=true
+ * Value Range: bool, default=false
  * Example:
  * Note: . If True, mmap_allocator will cache shm file to decrease munmap
  * operation.
  */
 PADDLE_DEFINE_EXPORTED_bool(use_shm_cache,
-                            true,
+                            false,
                             "Use shm cache in mmap_allocator.");
diff --git a/python/paddle/fluid/dataloader/dataloader_iter.py b/python/paddle/fluid/dataloader/dataloader_iter.py
index c7c49c794a1017b1788a489cde6030d4ca374c1b..66c6dff6c191348372ab85c779bb3013862e020f 100644
--- a/python/paddle/fluid/dataloader/dataloader_iter.py
+++ b/python/paddle/fluid/dataloader/dataloader_iter.py
@@ -410,13 +410,22 @@ class _DataLoaderIterMultiProcess(_DataLoaderIterBase):
         # Note(zhangbo): shm_buffer_size is used for MemoryMapAllocationPool.
         # MemoryMapAllocationPool is used to cache and reuse shm, thus reducing munmap in dataloader.
         # For more details, please see: paddle/fluid/memory/allocation/mmap_allocator.h
-        try:
-            self._worker_shm_buffer_size = (2 + 1) * len(self._dataset[0])
-        except:
+        if os.environ.get('FLAGS_use_shm_cache', False) in [
+            1,
+            '1',
+            True,
+            'True',
+            'true',
+        ]:
+            try:
+                self._worker_shm_buffer_size = (2 + 1) * len(self._dataset[0])
+            except:
+                self._worker_shm_buffer_size = 0
+                warnings.warn(
+                    "Setting the shm cache buffer size to 0, equivalent to not using the shm cache policy."
+                )
+        else:
             self._worker_shm_buffer_size = 0
-            warnings.warn(
-                "Setting the shm cache buffer size to 0, equivalent to not using the shm cache policy."
-            )
         self._main_thread_shm_buffer_size = (
             (self._worker_shm_buffer_size) * 2 * self._num_workers
         )
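
For context (not part of the patch): with this change the shm cache becomes opt-in, since `FLAGS_use_shm_cache` now defaults to `false` and the worker shm buffer is only sized when the environment flag is set. Below is a minimal sketch of how a user would re-enable the cache; the `RandomDataset` class and its shapes are illustrative assumptions, while `FLAGS_use_shm_cache`, `paddle.io.DataLoader`, and the multi-process iterator behaviour come from the diff.

```python
import os

# The cache now defaults to off; exporting the flag before the DataLoader is
# built is what makes the os.environ.get('FLAGS_use_shm_cache', ...) check in
# dataloader_iter.py take the cached-shm branch.
os.environ['FLAGS_use_shm_cache'] = 'true'

import numpy as np
from paddle.io import DataLoader, Dataset


class RandomDataset(Dataset):
    # Tiny in-memory dataset so the sketch is self-contained (hypothetical).
    def __len__(self):
        return 16

    def __getitem__(self, idx):
        return np.random.rand(3, 32, 32).astype('float32'), np.array([idx])


# num_workers > 0 selects _DataLoaderIterMultiProcess, which is where
# _worker_shm_buffer_size feeds the MemoryMapAllocationPool.
loader = DataLoader(RandomDataset(), batch_size=4, num_workers=2)
for batch in loader:
    pass
```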