提交 6eddb7a8 编写于 作者: M MRXLT

fix conflict

上级 3349a920
......@@ -55,8 +55,7 @@ def single_func(idx, resource):
for i in range(1, 27):
feed_dict["sparse_{}".format(i - 1)] = data[0][i]
feed_batch.append(feed_dict)
result = client.batch_predict(
feed_batch=feed_batch, fetch=fetch)
result = client.predict(feed=feed_batch, fetch=fetch)
else:
print("unsupport batch size {}".format(args.batch_size))
......
......@@ -56,8 +56,7 @@ def single_func(idx, resource):
feed_dict["embedding_{}.tmp_0".format(i - 1)] = data[0][
i]
feed_batch.append(feed_dict)
result = client.batch_predict(
feed_batch=feed_batch, fetch=fetch)
result = client.predict(feed=feed_batch, fetch=fetch)
else:
print("unsupport batch size {}".format(args.batch_size))
......
......@@ -50,8 +50,7 @@ def single_func(idx, resource):
img = reader.process_image(img_list[i])
img = img.reshape(-1)
feed_batch.append({"image": img})
result = client.batch_predict(
feed_batch=feed_batch, fetch=fetch)
result = client.predict(feed=feed_batch, fetch=fetch)
else:
print("unsupport batch size {}".format(args.batch_size))
......
......@@ -42,8 +42,7 @@ def single_func(idx, resource):
for bi in range(args.batch_size):
word_ids, label = imdb_dataset.get_words_and_label(line)
feed_batch.append({"words": word_ids})
result = client.batch_predict(
feed_batch=feed_batch, fetch=["prediction"])
result = client.predict(feed=feed_batch, fetch=["prediction"])
else:
print("unsupport batch size {}".format(args.batch_size))
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册