Commit 400bf520 authored by guru4elephant

refine VLOG

Parent 53ff72e1
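This change demotes routine status messages in the client/predictor code from LOG(INFO)/LOG(WARNING) (and two LOG(FATAL) calls in init_pchannel) to VLOG(2), so they only appear when verbose logging is enabled. As a minimal sketch of the resulting behaviour, assuming glog-style VLOG semantics (the serving binaries route logging through brpc, so the exact flag plumbing may differ), a VLOG(2) statement is printed only when the runtime verbosity level is at least 2:

```cpp
// Minimal sketch, not part of this commit: plain glog, illustrating how
// VLOG(2) messages are gated by the runtime verbosity level.
#include <glog/logging.h>

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = 1;  // log to stderr instead of files, for the demo
  FLAGS_v = 2;            // equivalent to starting the binary with --v=2

  LOG(INFO) << "always printed at INFO severity";
  VLOG(2) << "printed only because verbosity >= 2";
  VLOG(3) << "suppressed: would need --v=3 or higher";

  if (VLOG_IS_ON(2)) {
    // Guard work that is only needed to build a verbose message.
    VLOG(2) << "detailed state dump";
  }
  return 0;
}
```

With the verbosity left at its default of 0, every message touched by this commit is silenced, which appears to be the intent of the change.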
@@ -99,8 +99,8 @@ static void g_change_server_port() {
   if (read_proto_conf(FLAGS_inferservice_path.c_str(),
                       FLAGS_inferservice_file.c_str(),
                       &conf) != 0) {
-    VLOG(WARNING) << "failed to load configure[" << FLAGS_inferservice_path
-                  << "," << FLAGS_inferservice_file << "].";
+    VLOG(2) << "failed to load configure[" << FLAGS_inferservice_path
+            << "," << FLAGS_inferservice_file << "].";
     return;
   }
   uint32_t port = conf.port();
@@ -35,11 +35,11 @@ int StubImpl<T, C, R, I, O>::initialize(const VariantInfo& var,
   }
   _gchannel = init_channel(var, filter);
-  LOG(INFO) << "Create stub with tag: " << *tag << ", " << *tag_value
-            << ", ep: " << ep;
+  VLOG(2) << "Create stub with tag: " << *tag << ", " << *tag_value
+          << ", ep: " << ep;
 } else {
   _gchannel = init_channel(var, NULL);
-  LOG(INFO) << "Create stub without tag, ep " << ep;
+  VLOG(2) << "Create stub without tag, ep " << ep;
 }
 if (!_gchannel) {

@@ -143,7 +143,7 @@ int StubImpl<T, C, R, I, O>::thrd_initialize() {
     return -1;
   }
-  LOG(WARNING) << "Succ thread initialize stub impl!";
+  VLOG(2) << "Succ thread initialize stub impl!";
   return 0;
 }

@@ -370,7 +370,7 @@ google::protobuf::RpcChannel* StubImpl<T, C, R, I, O>::init_channel(
   // brpc parallel channel
   _pchannel = init_pchannel(_channel, _max_channel, _package_size, chn_options);
   if (_pchannel) {
-    LOG(INFO) << "Succ create parallel channel, count: " << _max_channel;
+    VLOG(2) << "Succ create parallel channel, count: " << _max_channel;
     return _pchannel;
   }

@@ -384,21 +384,21 @@ brpc::ParallelChannel* StubImpl<T, C, R, I, O>::init_pchannel(
     uint32_t package_size,
     const brpc::ChannelOptions& options) {
   if (channel_count <= 1) {  // noneed use parallel channel
-    LOG(INFO) << "channel count <= 1, noneed use pchannel.";
+    VLOG(2) << "channel count <= 1, noneed use pchannel.";
     return NULL;
   }
   _pchannel = butil::get_object<brpc::ParallelChannel>();
   if (!_pchannel) {
-    LOG(FATAL) << "Failed get pchannel from object pool";
+    VLOG(2) << "Failed get pchannel from object pool";
     return NULL;
   }
   brpc::ParallelChannelOptions pchan_options;
   pchan_options.timeout_ms = options.timeout_ms;
   if (_pchannel->Init(&pchan_options) != 0) {
-    LOG(FATAL) << "Failed init parallel channel with tmo_us: "
-               << pchan_options.timeout_ms;
+    VLOG(2) << "Failed init parallel channel with tmo_us: "
+            << pchan_options.timeout_ms;
     return NULL;
   }
@@ -52,9 +52,9 @@ int WeightedRandomRender::initialize(const google::protobuf::Message& conf) {
     return -1;
   }
-  LOG(INFO) << "Succ read weights list: " << weights
-            << ", count: " << _variant_weight_list.size()
-            << ", normalized: " << _normalized_sum;
+  VLOG(2) << "Succ read weights list: " << weights
+          << ", count: " << _variant_weight_list.size()
+          << ", normalized: " << _normalized_sum;
 } catch (std::bad_cast& e) {
   LOG(ERROR) << "Failed init WeightedRandomRender"
              << "from configure, err:" << e.what();

@@ -87,9 +87,9 @@ Variant* WeightedRandomRender::route(const VariantList& variants) {
   for (uint32_t ci = 0; ci < cand_size; ++ci) {
     cur_total += _variant_weight_list[ci];
     if (sample < cur_total) {
-      LOG(INFO) << "Sample " << sample << " on " << ci
-                << ", _normalized: " << _normalized_sum
-                << ", weight: " << _variant_weight_list[ci];
+      VLOG(2) << "Sample " << sample << " on " << ci
+              << ", _normalized: " << _normalized_sum
+              << ", weight: " << _variant_weight_list[ci];
       return variants[ci];
     }
   }
@@ -80,8 +80,8 @@ int EndpointConfigManager::load(const std::string& sdk_desc_str) {
     LOG(ERROR) << "Failed load configure" << e.what();
     return -1;
   }
-  LOG(INFO) << "Success reload endpoint config file, id: "
-            << _current_endpointmap_id;
+  VLOG(2) << "Success reload endpoint config file, id: "
+          << _current_endpointmap_id;
   return 0;
 }

@@ -128,8 +128,8 @@ int EndpointConfigManager::load() {
     LOG(ERROR) << "Failed load configure" << e.what();
     return -1;
   }
-  LOG(INFO) << "Success reload endpoint config file, id: "
-            << _current_endpointmap_id;
+  VLOG(2) << "Success reload endpoint config file, id: "
+          << _current_endpointmap_id;
   return 0;
 }

@@ -181,8 +181,8 @@ int EndpointConfigManager::init_one_endpoint(const configure::Predictor& conf,
     return -1;
   }
-  LOG(INFO) << "Succ load one endpoint, name: " << ep.endpoint_name
-            << ", count of variants: " << ep.vars.size() << ".";
+  VLOG(2) << "Succ load one endpoint, name: " << ep.endpoint_name
+          << ", count of variants: " << ep.vars.size() << ".";
 } catch (std::exception& e) {
   LOG(ERROR) << "Exception acccurs when load endpoint conf"
              << ", message: " << e.what();

@@ -258,7 +258,7 @@ int EndpointConfigManager::merge_variant(const VariantInfo& default_var,
 int EndpointConfigManager::parse_tag_values(SplitParameters& split) {
   split.tag_values.clear();
   if (!split.split_tag.init || !split.tag_cands_str.init) {
-    LOG(WARNING) << "split info not set, skip...";
+    VLOG(2) << "split info not set, skip...";
     return 0;
   }
@@ -35,8 +35,8 @@ int Endpoint::initialize(const EndpointInfo& ep_info) {
       return -1;
     }
     _variant_list.push_back(var);
-    LOG(INFO) << "Succ create variant: " << vi
-              << ", endpoint:" << _endpoint_name;
+    VLOG(2) << "Succ create variant: " << vi
+            << ", endpoint:" << _endpoint_name;
   }
   return 0;

@@ -51,7 +51,7 @@ int Endpoint::thrd_initialize() {
      return -1;
    }
  }
- LOG(WARNING) << "Succ thrd initialize all vars: " << var_size;
+ VLOG(2) << "Succ thrd initialize all vars: " << var_size;
  return 0;
 }
@@ -25,7 +25,7 @@ int PredictorApi::register_all() {
     return -1;
   }
-  LOG(WARNING) << "Succ register all components!";
+  VLOG(2) << "Succ register all components!";
   return 0;
 }

@@ -66,8 +66,8 @@ int PredictorApi::create(const std::string & api_desc_str) {
       return -1;
     }
-    LOG(INFO) << "Succ create endpoint instance with name: "
-              << ep_info.endpoint_name;
+    VLOG(2) << "Succ create endpoint instance with name: "
+            << ep_info.endpoint_name;
   }
   return 0;

@@ -101,7 +101,7 @@ int PredictorApi::create(const char* path, const char* file) {
       return -1;
     }
-    LOG(INFO) << "endpoint name: " << ep_info.endpoint_name;
+    VLOG(2) << "endpoint name: " << ep_info.endpoint_name;
     std::pair<std::map<std::string, Endpoint*>::iterator, bool> r =
         _endpoints.insert(std::make_pair(ep_info.endpoint_name, ep));

@@ -110,8 +110,8 @@ int PredictorApi::create(const char* path, const char* file) {
       return -1;
     }
-    LOG(INFO) << "Succ create endpoint instance with name: "
-              << ep_info.endpoint_name;
+    VLOG(2) << "Succ create endpoint instance with name: "
+            << ep_info.endpoint_name;
   }
   return 0;

@@ -126,7 +126,7 @@ int PredictorApi::thrd_initialize() {
       return -1;
     }
-    LOG(WARNING) << "Succ thrd initialize endpoint:" << it->first;
+    VLOG(2) << "Succ thrd initialize endpoint:" << it->first;
   }
   return 0;
 }

@@ -152,7 +152,7 @@ int PredictorApi::thrd_finalize() {
       return -1;
     }
-    LOG(INFO) << "Succ thrd finalize endpoint:" << it->first;
+    VLOG(2) << "Succ thrd finalize endpoint:" << it->first;
   }
   return 0;
 }
@@ -53,7 +53,7 @@ int Variant::initialize(const EndpointInfo& ep_info,
   }
   if (_stub_map.size() > 0) {
-    LOG(INFO) << "Initialize variants from VariantInfo"
-              << ", stubs count: " << _stub_map.size();
+    VLOG(2) << "Initialize variants from VariantInfo"
+            << ", stubs count: " << _stub_map.size();
     return 0;
   }

@@ -66,7 +66,7 @@ int Variant::initialize(const EndpointInfo& ep_info,
   }
   _default_stub = stub;
-  LOG(INFO) << "Succ create default debug";
+  VLOG(2) << "Succ create default debug";
   return 0;
 }

@@ -82,10 +82,10 @@ int Variant::thrd_initialize() {
       LOG(ERROR) << "Failed thrd initialize stub: " << iter->first;
       return -1;
     }
-    LOG(INFO) << "Succ thrd initialize stub:" << iter->first;
+    VLOG(2) << "Succ thrd initialize stub:" << iter->first;
   }
-  LOG(WARNING) << "Succ thrd initialize all stubs";
+  VLOG(2) << "Succ thrd initialize all stubs";
   return 0;
 }
@@ -32,5 +32,5 @@ for pass_id in range(30):
         fetch_list=[avg_loss])
 serving_io.save_model(
-    "serving_server_model", "serving_client_conf",
-    {"x": x}, {"y": y_predict}, fluid.default_main_program())
+    "uci_housing_model", "uci_housing_client",
+    {"x": x}, {"price": y_predict}, fluid.default_main_program())
@@ -46,18 +46,17 @@ if __name__ == "__main__":
     dataset.set_use_var([data, label])
     pipe_command = "python imdb_reader.py"
     dataset.set_pipe_command(pipe_command)
-    dataset.set_batch_size(4)
+    dataset.set_batch_size(128)
     dataset.set_filelist(filelist)
     dataset.set_thread(10)
-    from nets import cnn_net
-    avg_cost, acc, prediction = cnn_net(data, label, dict_dim)
+    from nets import bow_net
+    avg_cost, acc, prediction = bow_net(data, label, dict_dim)
     optimizer = fluid.optimizer.SGD(learning_rate=0.01)
     optimizer.minimize(avg_cost)
     exe = fluid.Executor(fluid.CPUPlace())
     exe.run(fluid.default_startup_program())
     epochs = 6
-    save_dirname = "cnn_model"
     import paddle_serving_client.io as serving_io

@@ -67,9 +66,5 @@ if __name__ == "__main__":
         logger.info("TRAIN --> pass: {}".format(i))
         if i == 5:
             serving_io.save_model("serving_server_model", "serving_client_conf",
-                                  {"words": data,
-                                   "label": label}, {
-                                      "cost": avg_cost,
-                                      "acc": acc,
-                                      "prediction": prediction
-                                  }, fluid.default_main_program())
+                                  {"words": data}, {"prediction": prediction},
+                                  fluid.default_main_program())