提交 83e06b62 编写于 作者: G guru4elephant

make output of general infer as GeneralBlob

上级 744b8419
@@ -39,6 +39,8 @@ int GeneralInferOp::inference() {
   const GeneralBlob *input_blob =
       get_depend_argument<GeneralBlob>(pre_name());
+  GeneralBlob *output_blob = mutable_data<GeneralBlob>();
+
   if (!input_blob) {
     LOG(ERROR) << "Failed mutable depended argument, op:"
                << pre_name();
@@ -46,7 +48,7 @@ int GeneralInferOp::inference() {
   }

   const TensorVector *in = &input_blob->tensor_vector;
-  TensorVector *out = butil::get_object<TensorVector>();
+  TensorVector *out = &output_blob->tensor_vector;
   int batch_size = input_blob->GetBatchSize();
   VLOG(2) << "infer batch size: " << batch_size;
...
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册