提交 d5bed961 编写于 作者: M MRXLT

fix bug for batch predict test=serving

上级 d234a142
@@ -228,16 +228,17 @@ class Client(object):
             fetch_names, result_batch, self.pid)
         result_map_batch = []
-        for index in range(batch_size):
-            result_map = {}
-            for i, name in enumerate(fetch_names):
-                if self.fetch_names_to_type_[name] == int_type:
-                    result_map[name] = result_batch.get_int64_by_name(name)[
-                        index]
-                elif self.fetch_names_to_type_[name] == float_type:
-                    result_map[name] = result_batch.get_float_by_name(name)[
-                        index]
-            result_map_batch.append(result_map)
+        result_map = {}
+        for i, name in enumerate(fetch_names):
+            if self.fetch_names_to_type_[name] == int_type:
+                result_map[name] = result_batch.get_int64_by_name(name)
+            elif self.fetch_names_to_type_[name] == float_type:
+                result_map[name] = result_batch.get_float_by_name(name)
+        for i in range(batch_size):
+            single_result = {}
+            for key in result_map:
+                single_result[key] = result_map[key][i]
+            result_map_batch.append(single_result)
         return result_map_batch
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册