From 6eddb7a84e3a3d52a4e13d290485ac05e633e1c1 Mon Sep 17 00:00:00 2001 From: MRXLT Date: Tue, 31 Mar 2020 06:49:10 +0000 Subject: [PATCH] fix conflict --- python/examples/criteo_ctr/benchmark_batch.py | 3 +-- python/examples/criteo_ctr_with_cube/benchmark_batch.py | 3 +-- python/examples/imagenet/benchmark_batch.py | 3 +-- python/examples/imdb/benchmark_batch.py | 3 +-- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/python/examples/criteo_ctr/benchmark_batch.py b/python/examples/criteo_ctr/benchmark_batch.py index 47b63a6a..1e4348c9 100644 --- a/python/examples/criteo_ctr/benchmark_batch.py +++ b/python/examples/criteo_ctr/benchmark_batch.py @@ -55,8 +55,7 @@ def single_func(idx, resource): for i in range(1, 27): feed_dict["sparse_{}".format(i - 1)] = data[0][i] feed_batch.append(feed_dict) - result = client.batch_predict( - feed_batch=feed_batch, fetch=fetch) + result = client.predict(feed=feed_batch, fetch=fetch) else: print("unsupport batch size {}".format(args.batch_size)) diff --git a/python/examples/criteo_ctr_with_cube/benchmark_batch.py b/python/examples/criteo_ctr_with_cube/benchmark_batch.py index b4b15892..f395d327 100755 --- a/python/examples/criteo_ctr_with_cube/benchmark_batch.py +++ b/python/examples/criteo_ctr_with_cube/benchmark_batch.py @@ -56,8 +56,7 @@ def single_func(idx, resource): feed_dict["embedding_{}.tmp_0".format(i - 1)] = data[0][ i] feed_batch.append(feed_dict) - result = client.batch_predict( - feed_batch=feed_batch, fetch=fetch) + result = client.batch_predict(feed=feed_batch, fetch=fetch) else: print("unsupport batch size {}".format(args.batch_size)) diff --git a/python/examples/imagenet/benchmark_batch.py b/python/examples/imagenet/benchmark_batch.py index 74771009..e5314257 100644 --- a/python/examples/imagenet/benchmark_batch.py +++ b/python/examples/imagenet/benchmark_batch.py @@ -50,8 +50,7 @@ def single_func(idx, resource): img = reader.process_image(img_list[i]) img = img.reshape(-1) feed_batch.append({"image": img}) - result = client.batch_predict( - feed_batch=feed_batch, fetch=fetch) + result = client.predict(feed=feed_batch, fetch=fetch) else: print("unsupport batch size {}".format(args.batch_size)) diff --git a/python/examples/imdb/benchmark_batch.py b/python/examples/imdb/benchmark_batch.py index 302d6335..d36704a7 100644 --- a/python/examples/imdb/benchmark_batch.py +++ b/python/examples/imdb/benchmark_batch.py @@ -42,8 +42,7 @@ def single_func(idx, resource): for bi in range(args.batch_size): word_ids, label = imdb_dataset.get_words_and_label(line) feed_batch.append({"words": word_ids}) - result = client.batch_predict( - feed_batch=feed_batch, fetch=["prediction"]) + result = client.predict(feed=feed_batch, fetch=["prediction"]) else: print("unsupport batch size {}".format(args.batch_size)) -- GitLab
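
Note on the change: the hunks above move the benchmark scripts from the old client.batch_predict(feed_batch=..., fetch=...) signature to client.predict(feed=..., fetch=...), where feed is the same list of per-sample dicts the scripts already build. The sketch below illustrates that call pattern only; the Client setup (config path, endpoint, feed/fetch variable names) is not part of this patch and follows the usual paddle_serving_client workflow as an assumption.

    # Minimal sketch of the updated batched call, assuming the standard
    # paddle_serving_client Client workflow. The config path, endpoint
    # and sample values below are placeholders, not taken from this patch.
    from paddle_serving_client import Client

    client = Client()
    client.load_client_config("serving_client_conf/serving_client_conf.prototxt")
    client.connect(["127.0.0.1:9292"])

    # Batched inference now goes through predict(): feed is a list of
    # per-sample dicts, fetch is the list of output variable names.
    feed_batch = [{"words": [8, 233, 52, 601]}, {"words": [17, 44, 9]}]
    result = client.predict(feed=feed_batch, fetch=["prediction"])
    print(result)

In this style of call the batch size is simply len(feed_batch), so the scripts keep their existing loop that appends one dict per sample before issuing a single request.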