From fe3a5337a54792d46b2cb876ac7df01de106efcf Mon Sep 17 00:00:00 2001
From: wawltor
Date: Fri, 18 Dec 2020 10:47:25 +0800
Subject: [PATCH] add the log format for the bert benchmark (#5101)

---
 PaddleNLP/benchmark/bert/run_pretrain.py        | 2 +-
 PaddleNLP/benchmark/bert/run_pretrain_single.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/PaddleNLP/benchmark/bert/run_pretrain.py b/PaddleNLP/benchmark/bert/run_pretrain.py
index 4ecea128..ce5e2d1e 100644
--- a/PaddleNLP/benchmark/bert/run_pretrain.py
+++ b/PaddleNLP/benchmark/bert/run_pretrain.py
@@ -326,7 +326,7 @@ def do_train(args):
             if global_step % args.logging_steps == 0:
                 time_cost = time.time() - tic_train
                 print(
-                    "global step %d, epoch: %d, batch: %d, loss: %f, speed: %.2f step/s, ips :%.2f sequences/s"
+                    "global step %d, epoch: %d, batch: %d, loss: %f, speed: %.2f step/s, ips: %.2f sequences/s"
                     % (global_step, epoch, step, loss_return[0],
                        args.logging_steps / time_cost,
                        args.logging_steps * args.batch_size / time_cost))
diff --git a/PaddleNLP/benchmark/bert/run_pretrain_single.py b/PaddleNLP/benchmark/bert/run_pretrain_single.py
index 48b11162..1bd21049 100644
--- a/PaddleNLP/benchmark/bert/run_pretrain_single.py
+++ b/PaddleNLP/benchmark/bert/run_pretrain_single.py
@@ -276,7 +276,7 @@ def do_train(args):
             if global_step % args.logging_steps == 0:
                 time_cost = time.time() - tic_train
                 print(
-                    "global step %d, epoch: %d, batch: %d, loss: %f, speed: %.2f step/s, ips :%.2f sequences/s"
+                    "global step %d, epoch: %d, batch: %d, loss: %f, speed: %.2f step/s, ips: %.2f sequences/s"
                     % (global_step, epoch, step, loss_return[0],
                        args.logging_steps / time_cost,
                        args.logging_steps * args.batch_size / time_cost))
--
GitLab
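
The sketch below is not part of the patch; it only illustrates the effect of the one-character change in the benchmark's log format string ("ips :" becomes "ips: "). The variable values are made up for the demo, and the format string and argument order are copied from the patched print call.

    # Illustrative only: demonstrates the before/after log line produced by the
    # format string fixed in this patch. All numeric values are placeholders.
    global_step, epoch, step, loss = 100, 0, 100, 7.123456
    logging_steps, batch_size, time_cost = 20, 32, 4.0

    old_fmt = ("global step %d, epoch: %d, batch: %d, loss: %f, "
               "speed: %.2f step/s, ips :%.2f sequences/s")   # space before the colon
    new_fmt = ("global step %d, epoch: %d, batch: %d, loss: %f, "
               "speed: %.2f step/s, ips: %.2f sequences/s")   # colon followed by a space

    values = (global_step, epoch, step, loss,
              logging_steps / time_cost,
              logging_steps * batch_size / time_cost)
    print(old_fmt % values)  # ... speed: 5.00 step/s, ips :160.00 sequences/s
    print(new_fmt % values)  # ... speed: 5.00 step/s, ips: 160.00 sequences/s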