Commit fb83c4b8 authored by Qiao Longfei

fix ctr reader

Parent 75594b98
@@ -21,10 +21,10 @@ class CriteoDataset(Dataset):
             for line in f:
                 line_idx += 1
                 if is_train and line_idx > self.train_idx_:
-                    continue
+                    break
                 elif not is_train and line_idx <= self.train_idx_:
                     continue
-                if trainer_id > 0 and line_idx % trainer_num != trainer_id:
+                if line_idx % trainer_num != trainer_id:
                     continue
                 features = line.rstrip('\n').split('\t')
                 dense_feature = []
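The two changes in this hunk adjust how the reader walks a Criteo file. Since the first self.train_idx_ lines are the train split and everything after them is the test split, the train reader can break as soon as line_idx passes train_idx_ instead of scanning to the end of the file; and the modulo filter is now applied unconditionally, so with 0-based trainer ids every line is owned by exactly one trainer (under the old filter, a 1-based id equal to trainer_num never matched and that trainer read nothing). Below is a minimal, self-contained sketch of the resulting sharding behaviour; shard_lines is a hypothetical helper written for illustration, not a function from this repository.

def shard_lines(lines, train_idx, is_train, trainer_num, trainer_id):
    """Yield only the lines that one trainer should read (illustrative sketch)."""
    line_idx = 0
    for line in lines:
        line_idx += 1
        if is_train and line_idx > train_idx:
            break        # the rest of the file is the test split
        elif not is_train and line_idx <= train_idx:
            continue     # skip the train split when reading test data
        if line_idx % trainer_num != trainer_id:
            continue     # each line belongs to exactly one 0-based trainer id
        yield line

if __name__ == '__main__':
    data = ['line%d' % i for i in range(1, 11)]      # 10 lines, first 8 are "train"
    print(list(shard_lines(data, 8, True, 2, 0)))    # ['line2', 'line4', 'line6', 'line8']
    print(list(shard_lines(data, 8, True, 2, 1)))    # ['line1', 'line3', 'line5', 'line7']
    print(list(shard_lines(data, 8, True, 1, 0)))    # a single local trainer keeps all 8 lines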
@@ -138,7 +138,7 @@ def train():
     if args.is_local:
         logger.info("run local training")
         main_program = fluid.default_main_program()
-        train_loop(args, main_program, data_list, loss, auc_var, batch_auc_var, 1, -1)
+        train_loop(args, main_program, data_list, loss, auc_var, batch_auc_var, 1, 0)
     else:
         logger.info("run dist training")
         t = fluid.DistributeTranspiler()
@@ -154,7 +154,7 @@ def train():
             logger.info("run trainer")
             train_prog = t.get_trainer_program()
             train_loop(args, train_prog, data_list, loss, auc_var, batch_auc_var,
-                       args.trainers, args.trainer_id + 1)
+                       args.trainers, args.trainer_id)


 if __name__ == '__main__':
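These two train_loop call sites follow from the reader change above: because line_idx % trainer_num != trainer_id is now checked for every trainer, the id passed in must be the 0-based args.trainer_id, and the local run passes (1, 0) rather than the old sentinel -1. A quick, hypothetical arithmetic check (not code from this repository) of which line indices a trainer keeps under the new filter:

def kept(trainer_id, trainer_num, num_lines=8):
    """Line indices (1-based) a trainer keeps under line_idx % trainer_num == trainer_id."""
    return [i for i in range(1, num_lines + 1) if i % trainer_num == trainer_id]

print(kept(0, 1))              # new local convention (1 trainer, id 0): every line
print(kept(-1, 1))             # old local sentinel -1 would now keep nothing
print(kept(2, 2))              # old 1-based id equal to trainer_num would also keep nothing
print(kept(0, 2), kept(1, 2))  # new 0-based ids: disjoint halves covering all lines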