Commit 53ff72e1 authored by guru4elephant

make warning message of inference engine as VLOG2

Parent 344a7d4c
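The change demotes informational and success messages from LOG(INFO)/LOG(WARNING), which glog always writes, to VLOG(2), which is only emitted when the verbosity level is at least 2 (the server already uses glog, cf. google::ShutdownGoogleLogging below). As an illustration only, not code from this commit, a minimal standalone glog sketch of the difference:

// Illustration only (not part of this commit): how glog gates VLOG(2).
#include <glog/logging.h>

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;  // send log output to stderr so the effect is visible
  LOG(WARNING) << "always emitted, regardless of verbosity";
  VLOG(2) << "suppressed: default verbosity is 0";
  FLAGS_v = 2;               // same effect as starting the binary with --v=2 or GLOG_v=2
  VLOG(2) << "emitted now that the verbosity level is >= 2";
  return 0;
}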
@@ -99,14 +99,14 @@ static void g_change_server_port() {
if (read_proto_conf(FLAGS_inferservice_path.c_str(),
FLAGS_inferservice_file.c_str(),
&conf) != 0) {
LOG(WARNING) << "failed to load configure[" << FLAGS_inferservice_path
VLOG(WARNING) << "failed to load configure[" << FLAGS_inferservice_path
<< "," << FLAGS_inferservice_file << "].";
return;
}
uint32_t port = conf.port();
if (port != 0) {
FLAGS_port = port;
LOG(INFO) << "use configure[" << FLAGS_inferservice_path << "/"
VLOG(2) << "use configure[" << FLAGS_inferservice_path << "/"
<< FLAGS_inferservice_file << "] port[" << port
<< "] instead of flags";
}
@@ -157,7 +157,7 @@ int main(int argc, char** argv) {
mkdir(FLAGS_log_dir.c_str(), 0777);
ret = stat(FLAGS_log_dir.c_str(), &st_buf);
if (ret != 0) {
LOG(WARNING) << "Log path " << FLAGS_log_dir
VLOG(2) << "Log path " << FLAGS_log_dir
<< " not exist, and create fail";
return -1;
}
@@ -166,7 +166,7 @@ int main(int argc, char** argv) {
FLAGS_logbufsecs = 0;
FLAGS_logbuflevel = -1;
#endif
LOG(INFO) << "Succ initialize logger";
VLOG(2) << "Succ initialize logger";
// initialize resource manager
if (Resource::instance().initialize(FLAGS_resource_path,
@@ -175,7 +175,7 @@ int main(int argc, char** argv) {
<< "/" << FLAGS_resource_file;
return -1;
}
LOG(INFO) << "Succ initialize resource";
VLOG(2) << "Succ initialize resource";
// initialize workflow manager
if (WorkflowManager::instance().initialize(FLAGS_workflow_path,
@@ -184,7 +184,7 @@ int main(int argc, char** argv) {
<< FLAGS_workflow_path << "/" << FLAGS_workflow_file;
return -1;
}
LOG(INFO) << "Succ initialize workflow";
VLOG(2) << "Succ initialize workflow";
// initialize service manager
if (InferServiceManager::instance().initialize(
@@ -193,7 +193,7 @@ int main(int argc, char** argv) {
<< FLAGS_inferservice_path << "/" << FLAGS_inferservice_file;
return -1;
}
LOG(INFO) << "Succ initialize inferservice";
VLOG(2) << "Succ initialize inferservice";
int errcode = bthread_set_worker_startfn(pthread_worker_start_fn);
if (errcode != 0) {
@@ -201,7 +201,7 @@ int main(int argc, char** argv) {
<< errcode << "]";
return -1;
}
LOG(INFO) << "Succ call pthread worker start function";
VLOG(2) << "Succ call pthread worker start function";
if (Resource::instance().cube_initialize(FLAGS_resource_path,
FLAGS_resource_file) != 0) {
@@ -209,7 +209,7 @@ int main(int argc, char** argv) {
<< FLAGS_resource_file;
return -1;
}
LOG(INFO) << "Succ initialize cube";
VLOG(2) << "Succ initialize cube";
#ifndef BCLOUD
@@ -220,7 +220,7 @@ int main(int argc, char** argv) {
return -1;
}
LOG(INFO) << "Succ initialize general model";
VLOG(2) << "Succ initialize general model";
// FATAL messages are output to stderr
FLAGS_stderrthreshold = 3;
@@ -230,7 +230,7 @@ int main(int argc, char** argv) {
LOG(ERROR) << "Failed start server and wait!";
return -1;
}
LOG(INFO) << "Succ start service manager";
VLOG(2) << "Succ start service manager";
if (InferServiceManager::instance().finalize() != 0) {
LOG(ERROR) << "Failed finalize infer service manager.";
@@ -248,6 +248,6 @@ int main(int argc, char** argv) {
#else
google::ShutdownGoogleLogging();
#endif
LOG(INFO) << "Paddle Inference Server exit successfully!";
VLOG(2) << "Paddle Inference Server exit successfully!";
return 0;
}
@@ -138,7 +138,7 @@ class FluidCpuAnalysisCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -169,7 +169,7 @@ class FluidCpuNativeCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -202,7 +202,7 @@ class FluidCpuAnalysisDirCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -231,7 +231,7 @@ class FluidCpuNativeDirCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -240,7 +240,7 @@ class Parameter {
public:
Parameter() : _row(0), _col(0), _params(NULL) {}
~Parameter() {
LOG(INFO) << "before destroy Parameter, file_name[" << _file_name << "]";
VLOG(2) << "before destroy Parameter, file_name[" << _file_name << "]";
destroy();
}
@@ -254,7 +254,7 @@ class Parameter {
LOG(ERROR) << "Load " << _file_name << " malloc error.";
return -1;
}
LOG(WARNING) << "Load parameter file[" << _file_name << "] success.";
VLOG(2) << "Load parameter file[" << _file_name << "] success.";
return 0;
}
@@ -296,7 +296,7 @@ class Parameter {
fclose(fs);
fs = NULL;
}
LOG(INFO) << "load " << _file_name << " read ok.";
VLOG(2) << "load " << _file_name << " read ok.";
return 0;
} else {
LOG(ERROR) << "load " << _file_name << " read error.";
@@ -329,13 +329,13 @@ class SigmoidModel {
LOG(ERROR) << "load params sigmoid_w failed.";
return -1;
}
LOG(WARNING) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
VLOG(2) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
<< _sigmoid_w._params[1] << "].";
if (0 != _sigmoid_b.init(2, 1, sigmoid_b_file) || 0 != _sigmoid_b.load()) {
LOG(ERROR) << "load params sigmoid_b failed.";
return -1;
}
LOG(WARNING) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
VLOG(2) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
<< _sigmoid_b._params[1] << "].";
_exp_max_input = exp_max;
_exp_min_input = exp_min;
@@ -412,7 +412,7 @@ class FluidCpuWithSigmoidCore : public FluidFamilyCore {
float exp_max = conf.exp_max_input();
float exp_min = conf.exp_min_input();
_core->_sigmoid_core.reset(new SigmoidModel);
LOG(INFO) << "create sigmoid core[" << _core->_sigmoid_core.get()
VLOG(2) << "create sigmoid core[" << _core->_sigmoid_core.get()
<< "], use count[" << _core->_sigmoid_core.use_count() << "].";
ret = _core->_sigmoid_core->load(
sigmoid_w_file, sigmoid_b_file, exp_max, exp_min);
@@ -444,7 +444,7 @@ class FluidCpuWithSigmoidCore : public FluidFamilyCore {
LOG(ERROR) << "fail to clone paddle predictor: " << origin_core;
return -1;
}
LOG(INFO) << "clone sigmoid core[" << _core->_sigmoid_core.get()
VLOG(2) << "clone sigmoid core[" << _core->_sigmoid_core.get()
<< "] use count[" << _core->_sigmoid_core.use_count() << "].";
return 0;
}
@@ -487,7 +487,7 @@ class FluidCpuNativeDirWithSigmoidCore : public FluidCpuWithSigmoidCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -520,7 +520,7 @@ class FluidCpuAnalysisDirWithSigmoidCore : public FluidCpuWithSigmoidCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -143,7 +143,7 @@ class FluidGpuAnalysisCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -173,7 +173,7 @@ class FluidGpuNativeCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -206,7 +206,7 @@ class FluidGpuAnalysisDirCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -235,7 +235,7 @@ class FluidGpuNativeDirCore : public FluidFamilyCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -258,7 +258,7 @@ class Parameter {
LOG(ERROR) << "Load " << _file_name << " malloc error.";
return -1;
}
LOG(WARNING) << "Load parameter file[" << _file_name << "] success.";
VLOG(2) << "Load parameter file[" << _file_name << "] success.";
return 0;
}
@@ -333,13 +333,13 @@ class SigmoidModel {
LOG(ERROR) << "load params sigmoid_w failed.";
return -1;
}
LOG(WARNING) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
VLOG(2) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
<< _sigmoid_w._params[1] << "].";
if (0 != _sigmoid_b.init(2, 1, sigmoid_b_file) || 0 != _sigmoid_b.load()) {
LOG(ERROR) << "load params sigmoid_b failed.";
return -1;
}
LOG(WARNING) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
VLOG(2) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
<< _sigmoid_b._params[1] << "].";
_exp_max_input = exp_max;
_exp_min_input = exp_min;
@@ -491,7 +491,7 @@ class FluidGpuNativeDirWithSigmoidCore : public FluidGpuWithSigmoidCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};
@@ -524,7 +524,7 @@ class FluidGpuAnalysisDirWithSigmoidCore : public FluidGpuWithSigmoidCore {
return -1;
}
LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
VLOG(2) << "create paddle predictor sucess, path: " << data_path;
return 0;
}
};