Commit 3caf0e22 authored by guru4elephant

remove endpoint name checking

Parent 57fc0122
......
@@ -103,14 +103,12 @@ int GeneralReaderOp::inference() {
VLOG(2) << "print general model config done.";
// check
/*
res->reader_status = conf_check(req, model_config);
if (res->reader_status != 0) {
LOG(INFO) << "model conf of server:";
resource.print_general_model_config(model_config);
return 0;
}
*/
// package tensor
elem_type.resize(var_num);
......
@@ -202,15 +200,6 @@ int GeneralReaderOp::inference() {
}
VLOG(2) << "read data from client success";
// print request
/*
std::ostringstream oss;
int64_t *example = reinterpret_cast<int64_t *>((*in)[0].data.data());
for (int i = 0; i < 10; i++) {
oss << *(example + i) << " ";
}
VLOG(2) << "head element of first feed var : " << oss.str();
*/
return 0;
}
DEFINE_OP(GeneralReaderOp);
......
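For reference, the second block deleted above was a debug dump of the first few values of the first feed tensor. Below is a self-contained sketch of that same pattern, kept only to document what the commit removes; the buffer pointer stands in for (*in)[0].data.data() and the int64_t layout follows the original comment, so nothing here is new behavior.
// Sketch only: standalone version of the deleted debug print.
// `buf` stands in for (*in)[0].data.data(); the int64_t layout is assumed
// from the original commented-out block.
#include <cstdint>
#include <sstream>
#include <string>
static std::string head_elements(const void* buf, int n) {
  std::ostringstream oss;
  const int64_t* example = reinterpret_cast<const int64_t*>(buf);
  for (int i = 0; i < n; i++) {
    oss << example[i] << " ";
  }
  return oss.str();
}
// Usage, mirroring the removed lines:
//   VLOG(2) << "head element of first feed var : "
//           << head_elements((*in)[0].data.data(), 10);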
......
@@ -49,11 +49,6 @@ class PredictorApi {
}
Predictor* fetch_predictor(std::string ep_name) {
std::map<std::string, Endpoint*>::iterator iter;
VLOG(2) << "going to print predictor names";
for (iter = _endpoints.begin(); iter != _endpoints.end(); ++iter) {
VLOG(2) << "name: " << iter->first;
}
std::map<std::string, Endpoint*>::iterator it = _endpoints.find(ep_name);
if (it == _endpoints.end() || !it->second) {
LOG(ERROR) << "Failed fetch predictor:"
......
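For context, here is a minimal sketch of the shape fetch_predictor takes once the name-printing loop is removed. The hunk above is truncated after the LOG(ERROR) line, so the message tail, the NULL return, and the Endpoint::get_predictor() call are assumptions about the surrounding SDK code, not part of this diff.
// Sketch only (inside PredictorApi); pieces past the truncated hunk are assumed.
Predictor* fetch_predictor(std::string ep_name) {
  std::map<std::string, Endpoint*>::iterator it = _endpoints.find(ep_name);
  if (it == _endpoints.end() || !it->second) {
    LOG(ERROR) << "Failed fetch predictor: " << ep_name;  // message tail assumed
    return NULL;                                          // assumed error path
  }
  return it->second->get_predictor();                     // assumed Endpoint accessor
}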