Unverified commit eb16b33f authored by TeslaZhao, committed by GitHub

Merge pull request #1390 from ShiningZhang/develop-profile

add log of cost time
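With this change, the client and (for GeneralModelService) the server each emit a one-line cost summary per request. Illustrative output with hypothetical timings, assuming both profile flags are on and a single-op model:

    [client]logid=100,client_pre_cost=0.13ms,client_infer_cost=12.46ms,client_post_cost=0.09ms,client_cost=12.73ms,op0=1.20ms,server_cost=12.11ms.
    [serving]logid=100,op0=1.20ms,cost=12.11ms.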
......@@ -119,10 +119,8 @@ int ServingBrpcClient::predict(const PredictorInputs& inputs,
   int64_t postprocess_start = 0;
   int64_t postprocess_end = 0;
-  if (FLAGS_profile_client) {
-    if (FLAGS_profile_server) {
-      req.set_profile_server(true);
-    }
-  }
+  if (FLAGS_profile_server) {
+    req.set_profile_server(true);
+  }
   res.Clear();
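Note the behavioral change in this hunk: `req.set_profile_server(true)` was previously sent only when the client itself was profiling (`FLAGS_profile_client`); it is now driven by `FLAGS_profile_server` alone, so server-side timing can be requested independently of client-side profiling. The same fix is applied to `PredictorClient::numpy_predict` below.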
......@@ -164,6 +162,34 @@ int ServingBrpcClient::predict(const PredictorInputs& inputs,
   // release predictor
   _api.thrd_clear();
+  std::ostringstream oss;
+  oss << "[client]"
+      << "logid=" << log_id << ",";
+  if (FLAGS_profile_client) {
+    double pre_cost = (preprocess_end - preprocess_start) / 1000.0;
+    double infer_cost = (client_infer_end - client_infer_start) / 1000.0;
+    double post_cost = (postprocess_end - postprocess_start) / 1000.0;
+    oss << "client_pre_cost=" << pre_cost << "ms,"
+        << "client_infer_cost=" << infer_cost << "ms,"
+        << "client_post_cost=" << post_cost << "ms,";
+  }
+  double client_cost = (postprocess_end - preprocess_start) / 1000.0;
+  oss << "client_cost=" << client_cost << "ms,";
+  int op_num = res.profile_time_size() / 2;
+  if (FLAGS_profile_server) {
+    for (int i = 0; i < op_num - 1; ++i) {
+      double t = (res.profile_time(i * 2 + 1)
+                  - res.profile_time(i * 2)) / 1000.0;
+      oss << "op" << i << "=" << t << "ms,";
+    }
+  }
+  int i = op_num - 1;
+  double server_cost = (res.profile_time(i * 2 + 1)
+                        - res.profile_time(i * 2)) / 1000.0;
+  oss << "server_cost=" << server_cost << "ms.";
+  LOG(INFO) << oss.str();
   return 0;
 }
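A note on the indexing above: `res.profile_time` is assumed to hold start/end timestamp pairs, one pair per server-side op, with one final pair for the whole request appended by the generated service code (see the PdsCodeGenerator hunks below, which unconditionally call `response->add_profile_time(start)`/`add_profile_time(end)` for GeneralModelService). That trailing pair is why the per-op loop stops at `op_num - 1` and why `server_cost` is computed outside the `FLAGS_profile_server` check:

    // Assumed layout of res.profile_time (timestamps in microseconds):
    // [op0_start, op0_end, ..., opN_start, opN_end, server_start, server_end]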
......
......@@ -329,10 +329,8 @@ int PredictorClient::numpy_predict(
   int64_t postprocess_start = 0;
   int64_t postprocess_end = 0;
-  if (FLAGS_profile_client) {
-    if (FLAGS_profile_server) {
-      req.set_profile_server(true);
-    }
-  }
+  if (FLAGS_profile_server) {
+    req.set_profile_server(true);
+  }
   res.Clear();
......@@ -428,6 +426,34 @@ int PredictorClient::numpy_predict(
   }
   _api.thrd_clear();
+  std::ostringstream oss;
+  oss << "[client]"
+      << "logid=" << log_id << ",";
+  if (FLAGS_profile_client) {
+    double pre_cost = (preprocess_end - preprocess_start) / 1000.0;
+    double infer_cost = (client_infer_end - client_infer_start) / 1000.0;
+    double post_cost = (postprocess_end - postprocess_start) / 1000.0;
+    oss << "client_pre_cost=" << pre_cost << "ms,"
+        << "client_infer_cost=" << infer_cost << "ms,"
+        << "client_post_cost=" << post_cost << "ms,";
+  }
+  double client_cost = (postprocess_end - preprocess_start) / 1000.0;
+  oss << "client_cost=" << client_cost << "ms,";
+  int op_num = res.profile_time_size() / 2;
+  if (FLAGS_profile_server) {
+    for (int i = 0; i < op_num - 1; ++i) {
+      double t = (res.profile_time(i * 2 + 1)
+                  - res.profile_time(i * 2)) / 1000.0;
+      oss << "op" << i << "=" << t << "ms,";
+    }
+  }
+  int i = op_num - 1;
+  double server_cost = (res.profile_time(i * 2 + 1)
+                        - res.profile_time(i * 2)) / 1000.0;
+  oss << "server_cost=" << server_cost << "ms.";
+  LOG(INFO) << oss.str();
   return 0;
 }
 }  // namespace general_model
......
......@@ -276,43 +276,65 @@ class PdsCodeGenerator : public CodeGenerator {
           "output_name",
           google::protobuf::dots_to_colons(m->output_type()->full_name()));
       if (m->name() == "inference") {
+        std::string inference_body = "";
+        inference_body += "  brpc::ClosureGuard done_guard(done);\n";
+        inference_body += "  brpc::Controller* cntl = \n";
+        inference_body += "      static_cast<brpc::Controller*>(cntl_base);\n";
+        inference_body += "  cntl->set_response_compress_type(brpc::COMPRESS_TYPE_GZIP);\n";
+        inference_body += "  uint64_t log_id = request->log_id();\n";
+        inference_body += "  cntl->set_log_id(log_id);\n";
+        inference_body += "  ::baidu::paddle_serving::predictor::InferService* svr = \n";
+        inference_body += "      ";
+        inference_body += "::baidu::paddle_serving::predictor::InferServiceManager::instance(";
+        inference_body += ").item(\"$service$\");\n";
+        inference_body += "  if (svr == NULL) {\n";
+        inference_body += "    LOG(ERROR) << \"(logid=\" << log_id << \") Not found service: ";
+        inference_body += "$service$\";\n";
+        inference_body += "    cntl->SetFailed(404, \"Not found service: $service$\");\n";
+        inference_body += "    return ;\n";
+        inference_body += "  }\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "remote_side=\[\" << cntl->remote_side() << ";  // NOLINT
+        inference_body += "\"\]\";\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "local_side=\[\" << cntl->local_side() << ";  // NOLINT
+        inference_body += "\"\]\";\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "service_name=\[\" << \"$name$\" << \"\]\";\n";  // NOLINT
+        inference_body += "  int err_code = svr->inference(request, response, log_id);\n";
+        inference_body += "  if (err_code != 0) {\n";
+        inference_body += "    LOG(WARNING)\n";
+        inference_body += "        << \"(logid=\" << log_id << \") Failed call ";
+        inference_body += "inferservice[$name$], name[$service$]\"\n";
+        inference_body += "        << \", error_code: \" << err_code;\n";
+        inference_body += "    cntl->SetFailed(err_code, \"InferService inference ";
+        inference_body += "failed!\");\n";
+        inference_body += "  }\n";
+        inference_body += "  gettimeofday(&tv, NULL);\n";
+        inference_body += "  long end = tv.tv_sec * 1000000 + tv.tv_usec;\n";
+        if (service_name == "GeneralModelService") {
+          inference_body += "  std::ostringstream oss;\n";
+          inference_body += "  oss << \"[serving]\"\n";
+          inference_body += "      << \"logid=\" << log_id << \",\";\n";
+          inference_body += "  int op_num = response->profile_time_size() / 2;\n";
+          inference_body += "  for (int i = 0; i < op_num; ++i) {\n";
+          inference_body += "    double t = (response->profile_time(i * 2 + 1)\n";
+          inference_body += "        - response->profile_time(i * 2)) / 1000.0;\n";
+          inference_body += "    oss << \"op\" << i << \"=\" << t << \"ms,\";\n";
+          inference_body += "  }\n";
+          inference_body += "  double total_time = (end - start) / 1000.0;\n";
+          inference_body += "  oss << \"cost=\" << total_time << \"ms.\";\n";
+          inference_body += "  // flush notice log\n";
+          inference_body += "  LOG(INFO) << oss.str();\n";
+          inference_body += "  response->add_profile_time(start);\n";
+          inference_body += "  response->add_profile_time(end);\n";
+        } else {
+          inference_body += "  // flush notice log\n";
+          inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") tc=\[\" << (end - ";  // NOLINT
+          inference_body += "start) << \"\]\";\n";
+        }
         printer->Print(
-            "  baidu::rpc::ClosureGuard done_guard(done);\n"
-            "  baidu::rpc::Controller* cntl = \n"
-            "      static_cast<baidu::rpc::Controller*>(cntl_base);\n"
-            "  cntl->set_response_compress_type(brpc::COMPRESS_TYPE_GZIP);\n"
-            "  uint64_t log_id = request->log_id();\n"
-            "  cntl->set_log_id(log_id);\n"
-            "  ::baidu::paddle_serving::predictor::InferService* svr = \n"
-            "      "
-            "::baidu::paddle_serving::predictor::InferServiceManager::instance("
-            ").item(\"$service$\");\n"
-            "  if (svr == NULL) {\n"
-            "    LOG(ERROR) << \"(logid=\" << log_id << \") Not found service: "
-            "$service$\";\n"
-            "    cntl->SetFailed(404, \"Not found service: $service$\");\n"
-            "    return ;\n"
-            "  }\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") remote_side=\[\" "  // NOLINT
-            "<< cntl->remote_side() << \"\]\";\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") local_side=\[\" "  // NOLINT
-            "<< cntl->local_side() << \"\]\";\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") service_name=\[\" "  // NOLINT
-            "<< \"$name$\" << \"\]\";\n"
-            "  int err_code = svr->inference(request, response, log_id);\n"
-            "  if (err_code != 0) {\n"
-            "    LOG(WARNING)\n"
-            "        << \"(logid=\" << log_id << \") Failed call "
-            "inferservice[$name$], name[$service$]\"\n"
-            "        << \", error_code: \" << err_code;\n"
-            "    cntl->SetFailed(err_code, \"InferService inference "
-            "failed!\");\n"
-            "  }\n"
-            "  gettimeofday(&tv, NULL);\n"
-            "  long end = tv.tv_sec * 1000000 + tv.tv_usec;\n"
-            "  // flush notice log\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") tc=\[\" << (end - "  // NOLINT
-            "start) << \"\]\";\n",  // NOLINT
+            inference_body.c_str(),
             "name",
             class_name,
             "service",
......@@ -1021,45 +1043,65 @@ class PdsCodeGenerator : public CodeGenerator {
           "output_name",
           google::protobuf::dots_to_colons(m->output_type()->full_name()));
       if (m->name() == "inference") {
+        std::string inference_body = "";
+        inference_body += "  brpc::ClosureGuard done_guard(done);\n";
+        inference_body += "  brpc::Controller* cntl = \n";
+        inference_body += "      static_cast<brpc::Controller*>(cntl_base);\n";
+        inference_body += "  cntl->set_response_compress_type(brpc::COMPRESS_TYPE_GZIP);\n";
+        inference_body += "  uint64_t log_id = request->log_id();\n";
+        inference_body += "  cntl->set_log_id(log_id);\n";
+        inference_body += "  ::baidu::paddle_serving::predictor::InferService* svr = \n";
+        inference_body += "      ";
+        inference_body += "::baidu::paddle_serving::predictor::InferServiceManager::instance(";
+        inference_body += ").item(\"$service$\");\n";
+        inference_body += "  if (svr == NULL) {\n";
+        inference_body += "    LOG(ERROR) << \"(logid=\" << log_id << \") Not found service: ";
+        inference_body += "$service$\";\n";
+        inference_body += "    cntl->SetFailed(404, \"Not found service: $service$\");\n";
+        inference_body += "    return ;\n";
+        inference_body += "  }\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "remote_side=\[\" << cntl->remote_side() << ";  // NOLINT
+        inference_body += "\"\]\";\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "local_side=\[\" << cntl->local_side() << ";  // NOLINT
+        inference_body += "\"\]\";\n";
+        inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") ";
+        inference_body += "service_name=\[\" << \"$name$\" << \"\]\";\n";  // NOLINT
+        inference_body += "  int err_code = svr->inference(request, response, log_id);\n";
+        inference_body += "  if (err_code != 0) {\n";
+        inference_body += "    LOG(WARNING)\n";
+        inference_body += "        << \"(logid=\" << log_id << \") Failed call ";
+        inference_body += "inferservice[$name$], name[$service$]\"\n";
+        inference_body += "        << \", error_code: \" << err_code;\n";
+        inference_body += "    cntl->SetFailed(err_code, \"InferService inference ";
+        inference_body += "failed!\");\n";
+        inference_body += "  }\n";
+        inference_body += "  gettimeofday(&tv, NULL);\n";
+        inference_body += "  long end = tv.tv_sec * 1000000 + tv.tv_usec;\n";
+        if (service_name == "GeneralModelService") {
+          inference_body += "  std::ostringstream oss;\n";
+          inference_body += "  oss << \"[serving]\"\n";
+          inference_body += "      << \"logid=\" << log_id << \",\";\n";
+          inference_body += "  int op_num = response->profile_time_size() / 2;\n";
+          inference_body += "  for (int i = 0; i < op_num; ++i) {\n";
+          inference_body += "    double t = (response->profile_time(i * 2 + 1)\n";
+          inference_body += "        - response->profile_time(i * 2)) / 1000.0;\n";
+          inference_body += "    oss << \"op\" << i << \"=\" << t << \"ms,\";\n";
+          inference_body += "  }\n";
+          inference_body += "  double total_time = (end - start) / 1000.0;\n";
+          inference_body += "  oss << \"cost=\" << total_time << \"ms.\";\n";
+          inference_body += "  // flush notice log\n";
+          inference_body += "  LOG(INFO) << oss.str();\n";
+          inference_body += "  response->add_profile_time(start);\n";
+          inference_body += "  response->add_profile_time(end);\n";
+        } else {
+          inference_body += "  // flush notice log\n";
+          inference_body += "  LOG(INFO) << \"(logid=\" << log_id << \") tc=\[\" << (end - ";  // NOLINT
+          inference_body += "start) << \"\]\";\n";
+        }
         printer->Print(
-            "  brpc::ClosureGuard done_guard(done);\n"
-            "  brpc::Controller* cntl = \n"
-            "      static_cast<brpc::Controller*>(cntl_base);\n"
-            "  cntl->set_response_compress_type(brpc::COMPRESS_TYPE_GZIP);\n"
-            "  uint64_t log_id = request->log_id();\n"
-            "  cntl->set_log_id(log_id);\n"
-            "  ::baidu::paddle_serving::predictor::InferService* svr = \n"
-            "      "
-            "::baidu::paddle_serving::predictor::InferServiceManager::instance("
-            ").item(\"$service$\");\n"
-            "  if (svr == NULL) {\n"
-            "    LOG(ERROR) << \"(logid=\" << log_id << \") Not found service: "
-            "$service$\";\n"
-            "    cntl->SetFailed(404, \"Not found service: $service$\");\n"
-            "    return ;\n"
-            "  }\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") "
-            "remote_side=\[\" << cntl->remote_side() << "  // NOLINT
-            "\"\]\";\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") "
-            "local_side=\[\" << cntl->local_side() << "  // NOLINT
-            "\"\]\";\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") "
-            "service_name=\[\" << \"$name$\" << \"\]\";\n"  // NOLINT
-            "  int err_code = svr->inference(request, response, log_id);\n"
-            "  if (err_code != 0) {\n"
-            "    LOG(WARNING)\n"
-            "        << \"(logid=\" << log_id << \") Failed call "
-            "inferservice[$name$], name[$service$]\"\n"
-            "        << \", error_code: \" << err_code;\n"
-            "    cntl->SetFailed(err_code, \"InferService inference "
-            "failed!\");\n"
-            "  }\n"
-            "  gettimeofday(&tv, NULL);\n"
-            "  long end = tv.tv_sec * 1000000 + tv.tv_usec;\n"
-            "  // flush notice log\n"
-            "  LOG(INFO) << \"(logid=\" << log_id << \") tc=\[\" << (end - "  // NOLINT
-            "start) << \"\]\";\n",  // NOLINT
+            inference_body.c_str(),
             "name",
             class_name,
            "service",
......