Commit 1d11af72 authored by LDOUBLEV

Fix shuffle not working

Parent 09fd94e7
@@ -65,6 +65,7 @@ def build_dataloader(config, mode, device, logger, seed=None):
     loader_config = config[mode]['loader']
     batch_size = loader_config['batch_size_per_card']
     drop_last = loader_config['drop_last']
+    shuffle = loader_config['shuffle']
     num_workers = loader_config['num_workers']
     if 'use_shared_memory' in loader_config.keys():
         use_shared_memory = loader_config['use_shared_memory']
@@ -75,14 +76,14 @@ def build_dataloader(config, mode, device, logger, seed=None):
         batch_sampler = DistributedBatchSampler(
             dataset=dataset,
             batch_size=batch_size,
-            shuffle=False,
+            shuffle=shuffle,
             drop_last=drop_last)
     else:
         #Distribute data to single card
         batch_sampler = BatchSampler(
             dataset=dataset,
             batch_size=batch_size,
-            shuffle=False,
+            shuffle=shuffle,
             drop_last=drop_last)
     data_loader = DataLoader(
...
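The change reads the `shuffle` flag from the loader config instead of hard-coding `shuffle=False`, so the `shuffle` option set under the loader section of the config now actually controls the batch sampler. The following standalone sketch illustrates the fixed behavior using `paddle.io` directly; `SimpleDataset`, `build_simple_dataloader`, and the example config dict are illustrative placeholders, not part of PaddleOCR.

# Minimal sketch (assumed names, not the full PaddleOCR build_dataloader):
# the loader config's `shuffle` flag is forwarded to the batch sampler.
from paddle.io import Dataset, BatchSampler, DataLoader


class SimpleDataset(Dataset):
    """Toy dataset standing in for a real OCR dataset."""

    def __init__(self, n=16):
        self.data = list(range(n))

    def __getitem__(self, idx):
        return self.data[idx]

    def __len__(self):
        return len(self.data)


def build_simple_dataloader(loader_config, dataset):
    # Before the fix, shuffle was hard-coded to False at this point,
    # so the `shuffle` key in the config was silently ignored.
    shuffle = loader_config['shuffle']
    batch_sampler = BatchSampler(
        dataset=dataset,
        batch_size=loader_config['batch_size_per_card'],
        shuffle=shuffle,
        drop_last=loader_config['drop_last'])
    return DataLoader(dataset=dataset, batch_sampler=batch_sampler)


loader_config = {'batch_size_per_card': 4, 'drop_last': True, 'shuffle': True}
loader = build_simple_dataloader(loader_config, SimpleDataset())
for batch in loader:
    print(batch)  # batches now come out in shuffled order when shuffle=True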