diff --git a/core/predictor/src/pdserving.cpp b/core/predictor/src/pdserving.cpp
index e8a7591d0d353758a3cbfd32498a80226d17358d..f0fedacf63b250ec3dd742744dc01d67d567f653 100644
--- a/core/predictor/src/pdserving.cpp
+++ b/core/predictor/src/pdserving.cpp
@@ -99,16 +99,16 @@ static void g_change_server_port() {
   if (read_proto_conf(FLAGS_inferservice_path.c_str(),
                       FLAGS_inferservice_file.c_str(),
                       &conf) != 0) {
-    LOG(WARNING) << "failed to load configure[" << FLAGS_inferservice_path
-                 << "," << FLAGS_inferservice_file << "].";
+    VLOG(2) << "failed to load configure[" << FLAGS_inferservice_path
+            << "," << FLAGS_inferservice_file << "].";
     return;
   }
   uint32_t port = conf.port();
   if (port != 0) {
     FLAGS_port = port;
-    LOG(INFO) << "use configure[" << FLAGS_inferservice_path << "/"
-              << FLAGS_inferservice_file << "] port[" << port
-              << "] instead of flags";
+    VLOG(2) << "use configure[" << FLAGS_inferservice_path << "/"
+            << FLAGS_inferservice_file << "] port[" << port
+            << "] instead of flags";
   }
   return;
 }
@@ -157,8 +157,8 @@ int main(int argc, char** argv) {
     mkdir(FLAGS_log_dir.c_str(), 0777);
     ret = stat(FLAGS_log_dir.c_str(), &st_buf);
     if (ret != 0) {
-      LOG(WARNING) << "Log path " << FLAGS_log_dir
-                   << " not exist, and create fail";
+      VLOG(2) << "Log path " << FLAGS_log_dir
+              << " not exist, and create fail";
       return -1;
     }
   }
@@ -166,7 +166,7 @@ int main(int argc, char** argv) {
   FLAGS_logbufsecs = 0;
   FLAGS_logbuflevel = -1;
 #endif
-  LOG(INFO) << "Succ initialize logger";
+  VLOG(2) << "Succ initialize logger";
 
   // initialize resource manager
   if (Resource::instance().initialize(FLAGS_resource_path,
@@ -175,7 +175,7 @@ int main(int argc, char** argv) {
                << "/" << FLAGS_resource_file;
     return -1;
   }
-  LOG(INFO) << "Succ initialize resource";
+  VLOG(2) << "Succ initialize resource";
 
   // initialize workflow manager
   if (WorkflowManager::instance().initialize(FLAGS_workflow_path,
@@ -184,7 +184,7 @@ int main(int argc, char** argv) {
                << FLAGS_workflow_path << "/" << FLAGS_workflow_file;
     return -1;
   }
-  LOG(INFO) << "Succ initialize workflow";
+  VLOG(2) << "Succ initialize workflow";
 
   // initialize service manager
   if (InferServiceManager::instance().initialize(
@@ -193,7 +193,7 @@ int main(int argc, char** argv) {
                << FLAGS_inferservice_path << "/" << FLAGS_inferservice_file;
     return -1;
   }
-  LOG(INFO) << "Succ initialize inferservice";
+  VLOG(2) << "Succ initialize inferservice";
 
   int errcode = bthread_set_worker_startfn(pthread_worker_start_fn);
   if (errcode != 0) {
@@ -201,7 +201,7 @@ int main(int argc, char** argv) {
                << errcode << "]";
     return -1;
   }
-  LOG(INFO) << "Succ call pthread worker start function";
+  VLOG(2) << "Succ call pthread worker start function";
 
   if (Resource::instance().cube_initialize(FLAGS_resource_path,
                                            FLAGS_resource_file) != 0) {
@@ -209,7 +209,7 @@ int main(int argc, char** argv) {
                << FLAGS_resource_file;
     return -1;
   }
-  LOG(INFO) << "Succ initialize cube";
+  VLOG(2) << "Succ initialize cube";
 
 #ifndef BCLOUD
 
@@ -220,7 +220,7 @@ int main(int argc, char** argv) {
     return -1;
   }
 
-  LOG(INFO) << "Succ initialize general model";
+  VLOG(2) << "Succ initialize general model";
 
   // FATAL messages are output to stderr
   FLAGS_stderrthreshold = 3;
@@ -230,7 +230,7 @@ int main(int argc, char** argv) {
     LOG(ERROR) << "Failed start server and wait!";
     return -1;
   }
-  LOG(INFO) << "Succ start service manager";
+  VLOG(2) << "Succ start service manager";
 
   if (InferServiceManager::instance().finalize() != 0) {
     LOG(ERROR) << "Failed finalize infer service manager.";
@@ -248,6 +248,6 @@ int main(int argc, char** argv) {
 #else
   google::ShutdownGoogleLogging();
 #endif
-  LOG(INFO) << "Paddle Inference Server exit successfully!";
+  VLOG(2) << "Paddle Inference Server exit successfully!";
   return 0;
 }
diff --git a/core/sdk-cpp/include/stub_impl.hpp b/core/sdk-cpp/include/stub_impl.hpp
index 30a1feae88a496e3e5beeb39298831e5de5d6d8d..6fad5b5e2c702652126bc159333046790fcefc69 100644
--- a/core/sdk-cpp/include/stub_impl.hpp
+++ b/core/sdk-cpp/include/stub_impl.hpp
@@ -35,11 +35,11 @@ int StubImpl::initialize(const VariantInfo& var,
     }
 
     _gchannel = init_channel(var, filter);
-    LOG(INFO) << "Create stub with tag: " << *tag << ", " << *tag_value
-              << ", ep: " << ep;
+    VLOG(2) << "Create stub with tag: " << *tag << ", " << *tag_value
+            << ", ep: " << ep;
   } else {
     _gchannel = init_channel(var, NULL);
-    LOG(INFO) << "Create stub without tag, ep " << ep;
+    VLOG(2) << "Create stub without tag, ep " << ep;
   }
 
   if (!_gchannel) {
@@ -143,7 +143,7 @@ int StubImpl::thrd_initialize() {
     return -1;
   }
 
-  LOG(WARNING) << "Succ thread initialize stub impl!";
+  VLOG(2) << "Succ thread initialize stub impl!";
 
   return 0;
 }
@@ -370,7 +370,7 @@ google::protobuf::RpcChannel* StubImpl::init_channel(
   // brpc parallel channel
   _pchannel =
       init_pchannel(_channel, _max_channel, _package_size, chn_options);
   if (_pchannel) {
-    LOG(INFO) << "Succ create parallel channel, count: " << _max_channel;
+    VLOG(2) << "Succ create parallel channel, count: " << _max_channel;
     return _pchannel;
   }
@@ -384,21 +384,21 @@ brpc::ParallelChannel* StubImpl::init_pchannel(
     uint32_t package_size,
     const brpc::ChannelOptions& options) {
   if (channel_count <= 1) {  // noneed use parallel channel
-    LOG(INFO) << "channel count <= 1, noneed use pchannel.";
+    VLOG(2) << "channel count <= 1, noneed use pchannel.";
     return NULL;
   }
 
   _pchannel = butil::get_object<brpc::ParallelChannel>();
   if (!_pchannel) {
-    LOG(FATAL) << "Failed get pchannel from object pool";
+    VLOG(2) << "Failed get pchannel from object pool";
     return NULL;
   }
 
   brpc::ParallelChannelOptions pchan_options;
   pchan_options.timeout_ms = options.timeout_ms;
   if (_pchannel->Init(&pchan_options) != 0) {
-    LOG(FATAL) << "Failed init parallel channel with tmo_us: "
-               << pchan_options.timeout_ms;
+    VLOG(2) << "Failed init parallel channel with tmo_us: "
+            << pchan_options.timeout_ms;
     return NULL;
   }
 
diff --git a/core/sdk-cpp/src/abtest.cpp b/core/sdk-cpp/src/abtest.cpp
index de594048c65de4724b158405e827d4bd72645cb5..836dc25ffb1913dc62313d7c5cdeb85958850fe3 100644
--- a/core/sdk-cpp/src/abtest.cpp
+++ b/core/sdk-cpp/src/abtest.cpp
@@ -52,9 +52,9 @@ int WeightedRandomRender::initialize(const google::protobuf::Message& conf) {
       return -1;
     }
 
-    LOG(INFO) << "Succ read weights list: " << weights
-              << ", count: " << _variant_weight_list.size()
-              << ", normalized: " << _normalized_sum;
+    VLOG(2) << "Succ read weights list: " << weights
+            << ", count: " << _variant_weight_list.size()
+            << ", normalized: " << _normalized_sum;
   } catch (std::bad_cast& e) {
     LOG(ERROR) << "Failed init WeightedRandomRender"
                << "from configure, err:" << e.what();
@@ -87,9 +87,9 @@ Variant* WeightedRandomRender::route(const VariantList& variants) {
   for (uint32_t ci = 0; ci < cand_size; ++ci) {
     cur_total += _variant_weight_list[ci];
     if (sample < cur_total) {
-      LOG(INFO) << "Sample " << sample << " on " << ci
-                << ", _normalized: " << _normalized_sum
-                << ", weight: " << _variant_weight_list[ci];
+      VLOG(2) << "Sample " << sample << " on " << ci
+              << ", _normalized: " << _normalized_sum
+              << ", weight: " << _variant_weight_list[ci];
       return variants[ci];
     }
   }
diff --git a/core/sdk-cpp/src/config_manager.cpp b/core/sdk-cpp/src/config_manager.cpp
index 8bc9f9514a8e3b84ea053ccd3eccca899db36e60..c422f0b52eba7d3a34e663f4198b9914a7722704 100644
--- a/core/sdk-cpp/src/config_manager.cpp
+++ b/core/sdk-cpp/src/config_manager.cpp
@@ -80,8 +80,8 @@ int EndpointConfigManager::load(const std::string& sdk_desc_str) {
     LOG(ERROR) << "Failed load configure" << e.what();
     return -1;
   }
-  LOG(INFO) << "Success reload endpoint config file, id: "
-            << _current_endpointmap_id;
+  VLOG(2) << "Success reload endpoint config file, id: "
+          << _current_endpointmap_id;
 
   return 0;
 }
@@ -128,8 +128,8 @@ int EndpointConfigManager::load() {
     LOG(ERROR) << "Failed load configure" << e.what();
     return -1;
   }
-  LOG(INFO) << "Success reload endpoint config file, id: "
-            << _current_endpointmap_id;
+  VLOG(2) << "Success reload endpoint config file, id: "
+          << _current_endpointmap_id;
 
   return 0;
 }
@@ -181,8 +181,8 @@ int EndpointConfigManager::init_one_endpoint(const configure::Predictor& conf,
       return -1;
     }
 
-    LOG(INFO) << "Succ load one endpoint, name: " << ep.endpoint_name
-              << ", count of variants: " << ep.vars.size() << ".";
+    VLOG(2) << "Succ load one endpoint, name: " << ep.endpoint_name
+            << ", count of variants: " << ep.vars.size() << ".";
   } catch (std::exception& e) {
     LOG(ERROR) << "Exception acccurs when load endpoint conf"
                << ", message: " << e.what();
@@ -258,7 +258,7 @@ int EndpointConfigManager::merge_variant(const VariantInfo& default_var,
 int EndpointConfigManager::parse_tag_values(SplitParameters& split) {
   split.tag_values.clear();
   if (!split.split_tag.init || !split.tag_cands_str.init) {
-    LOG(WARNING) << "split info not set, skip...";
+    VLOG(2) << "split info not set, skip...";
     return 0;
   }
 
diff --git a/core/sdk-cpp/src/endpoint.cpp b/core/sdk-cpp/src/endpoint.cpp
index fb1e32d193cf618194bf2849a004cbc88c46cceb..fe0bf024a24c6a8d959bd0b96319027388457207 100644
--- a/core/sdk-cpp/src/endpoint.cpp
+++ b/core/sdk-cpp/src/endpoint.cpp
@@ -35,8 +35,8 @@ int Endpoint::initialize(const EndpointInfo& ep_info) {
       return -1;
    }
    _variant_list.push_back(var);
-    LOG(INFO) << "Succ create variant: " << vi
-              << ", endpoint:" << _endpoint_name;
+    VLOG(2) << "Succ create variant: " << vi
+            << ", endpoint:" << _endpoint_name;
   }
 
   return 0;
@@ -51,7 +51,7 @@ int Endpoint::thrd_initialize() {
       return -1;
     }
   }
-  LOG(WARNING) << "Succ thrd initialize all vars: " << var_size;
+  VLOG(2) << "Succ thrd initialize all vars: " << var_size;
 
   return 0;
 }
diff --git a/core/sdk-cpp/src/predictor_sdk.cpp b/core/sdk-cpp/src/predictor_sdk.cpp
index ae976446e7e074f0b0723d9d637ee7ff320b00be..c9f4a6944bb451093d3e84dcac3c13b43f29a9c6 100644
--- a/core/sdk-cpp/src/predictor_sdk.cpp
+++ b/core/sdk-cpp/src/predictor_sdk.cpp
@@ -25,7 +25,7 @@ int PredictorApi::register_all() {
     return -1;
   }
 
-  LOG(WARNING) << "Succ register all components!";
+  VLOG(2) << "Succ register all components!";
 
   return 0;
 }
@@ -66,8 +66,8 @@ int PredictorApi::create(const std::string & api_desc_str) {
       return -1;
     }
 
-    LOG(INFO) << "Succ create endpoint instance with name: "
-              << ep_info.endpoint_name;
+    VLOG(2) << "Succ create endpoint instance with name: "
+            << ep_info.endpoint_name;
   }
 
   return 0;
@@ -101,7 +101,7 @@ int PredictorApi::create(const char* path, const char* file) {
       return -1;
     }
 
-    LOG(INFO) << "endpoint name: " << ep_info.endpoint_name;
+    VLOG(2) << "endpoint name: " << ep_info.endpoint_name;
 
     std::pair<std::map<std::string, Endpoint*>::iterator, bool> r =
        _endpoints.insert(std::make_pair(ep_info.endpoint_name, ep));
@@ -110,8 +110,8 @@ int PredictorApi::create(const char* path, const char* file) {
       return -1;
     }
 
-    LOG(INFO) << "Succ create endpoint instance with name: "
-              << ep_info.endpoint_name;
+    VLOG(2) << "Succ create endpoint instance with name: "
+            << ep_info.endpoint_name;
   }
 
   return 0;
@@ -126,7 +126,7 @@ int PredictorApi::thrd_initialize() {
       return -1;
     }
 
-    LOG(WARNING) << "Succ thrd initialize endpoint:" << it->first;
+    VLOG(2) << "Succ thrd initialize endpoint:" << it->first;
   }
   return 0;
 }
@@ -152,7 +152,7 @@ int PredictorApi::thrd_finalize() {
       return -1;
    }
 
-    LOG(INFO) << "Succ thrd finalize endpoint:" << it->first;
+    VLOG(2) << "Succ thrd finalize endpoint:" << it->first;
   }
   return 0;
 }
diff --git a/core/sdk-cpp/src/variant.cpp b/core/sdk-cpp/src/variant.cpp
index c11d18285b5bec6bb8982bbfb3feb7b6879a073d..ae1d787bfc2bd3b4ca57eb9d8043cd40744b9c50 100644
--- a/core/sdk-cpp/src/variant.cpp
+++ b/core/sdk-cpp/src/variant.cpp
@@ -53,7 +53,7 @@ int Variant::initialize(const EndpointInfo& ep_info,
   }
 
   if (_stub_map.size() > 0) {
-    LOG(INFO) << "Initialize variants from VariantInfo"
+    VLOG(2) << "Initialize variants from VariantInfo"
               << ", stubs count: " << _stub_map.size();
     return 0;
   }
@@ -66,7 +66,7 @@ int Variant::initialize(const EndpointInfo& ep_info,
   }
 
   _default_stub = stub;
-  LOG(INFO) << "Succ create default debug";
+  VLOG(2) << "Succ create default debug";
 
   return 0;
 }
@@ -82,10 +82,10 @@ int Variant::thrd_initialize() {
       LOG(ERROR) << "Failed thrd initialize stub: " << iter->first;
       return -1;
     }
-    LOG(INFO) << "Succ thrd initialize stub:" << iter->first;
+    VLOG(2) << "Succ thrd initialize stub:" << iter->first;
   }
 
-  LOG(WARNING) << "Succ thrd initialize all stubs";
+  VLOG(2) << "Succ thrd initialize all stubs";
 
   return 0;
 }
diff --git a/paddle_inference/inferencer-fluid-cpu/include/fluid_cpu_engine.h b/paddle_inference/inferencer-fluid-cpu/include/fluid_cpu_engine.h
index 639dbc580247d1f3ff44ea75c6080630c0298161..7a7291a067da53764d4bbad5106fbc8ea672d012 100644
--- a/paddle_inference/inferencer-fluid-cpu/include/fluid_cpu_engine.h
+++ b/paddle_inference/inferencer-fluid-cpu/include/fluid_cpu_engine.h
@@ -138,7 +138,7 @@ class FluidCpuAnalysisCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -169,7 +169,7 @@ class FluidCpuNativeCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -202,7 +202,7 @@ class FluidCpuAnalysisDirCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -231,7 +231,7 @@ class FluidCpuNativeDirCore : public FluidFamilyCore {
      return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -240,7 +240,7 @@ class Parameter {
  public:
   Parameter() : _row(0), _col(0), _params(NULL) {}
   ~Parameter() {
-    LOG(INFO) << "before destroy Parameter, file_name[" << _file_name << "]";
+    VLOG(2) << "before destroy Parameter, file_name[" << _file_name << "]";
     destroy();
   }
 
@@ -254,7 +254,7 @@ class Parameter {
       LOG(ERROR) << "Load " << _file_name << " malloc error.";
       return -1;
     }
-    LOG(WARNING) << "Load parameter file[" << _file_name << "] success.";
+    VLOG(2) << "Load parameter file[" << _file_name << "] success.";
     return 0;
   }
 
@@ -296,7 +296,7 @@ class Parameter {
         fclose(fs);
         fs = NULL;
       }
-      LOG(INFO) << "load " << _file_name << " read ok.";
+      VLOG(2) << "load " << _file_name << " read ok.";
       return 0;
     } else {
       LOG(ERROR) << "load " << _file_name << " read error.";
@@ -329,13 +329,13 @@ class SigmoidModel {
       LOG(ERROR) << "load params sigmoid_w failed.";
       return -1;
     }
-    LOG(WARNING) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
-                 << _sigmoid_w._params[1] << "].";
+    VLOG(2) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
+            << _sigmoid_w._params[1] << "].";
     if (0 != _sigmoid_b.init(2, 1, sigmoid_b_file) || 0 != _sigmoid_b.load()) {
       LOG(ERROR) << "load params sigmoid_b failed.";
       return -1;
     }
-    LOG(WARNING) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
+    VLOG(2) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
                  << _sigmoid_b._params[1] << "].";
     _exp_max_input = exp_max;
     _exp_min_input = exp_min;
@@ -412,8 +412,8 @@ class FluidCpuWithSigmoidCore : public FluidFamilyCore {
     float exp_max = conf.exp_max_input();
     float exp_min = conf.exp_min_input();
     _core->_sigmoid_core.reset(new SigmoidModel);
-    LOG(INFO) << "create sigmoid core[" << _core->_sigmoid_core.get()
-              << "], use count[" << _core->_sigmoid_core.use_count() << "].";
+    VLOG(2) << "create sigmoid core[" << _core->_sigmoid_core.get()
+            << "], use count[" << _core->_sigmoid_core.use_count() << "].";
     ret = _core->_sigmoid_core->load(
         sigmoid_w_file, sigmoid_b_file, exp_max, exp_min);
     if (ret < 0) {
@@ -444,8 +444,8 @@ class FluidCpuWithSigmoidCore : public FluidFamilyCore {
      LOG(ERROR) << "fail to clone paddle predictor: " << origin_core;
       return -1;
     }
-    LOG(INFO) << "clone sigmoid core[" << _core->_sigmoid_core.get()
-              << "] use count[" << _core->_sigmoid_core.use_count() << "].";
+    VLOG(2) << "clone sigmoid core[" << _core->_sigmoid_core.get()
+            << "] use count[" << _core->_sigmoid_core.use_count() << "].";
 
     return 0;
   }
@@ -487,7 +487,7 @@ class FluidCpuNativeDirWithSigmoidCore : public FluidCpuWithSigmoidCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -520,7 +520,7 @@ class FluidCpuAnalysisDirWithSigmoidCore : public FluidCpuWithSigmoidCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
diff --git a/paddle_inference/inferencer-fluid-gpu/include/fluid_gpu_engine.h b/paddle_inference/inferencer-fluid-gpu/include/fluid_gpu_engine.h
index 1b34bbda07eca16c8a1be50fc4b427ae6180b714..ca546207c47693d17d1ad5904d51f9888e0c5a04 100644
--- a/paddle_inference/inferencer-fluid-gpu/include/fluid_gpu_engine.h
+++ b/paddle_inference/inferencer-fluid-gpu/include/fluid_gpu_engine.h
@@ -143,7 +143,7 @@ class FluidGpuAnalysisCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -173,7 +173,7 @@ class FluidGpuNativeCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -206,7 +206,7 @@ class FluidGpuAnalysisDirCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -235,7 +235,7 @@ class FluidGpuNativeDirCore : public FluidFamilyCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
@@ -258,7 +258,7 @@ class Parameter {
       LOG(ERROR) << "Load " << _file_name << " malloc error.";
       return -1;
     }
-    LOG(WARNING) << "Load parameter file[" << _file_name << "] success.";
+    VLOG(2) << "Load parameter file[" << _file_name << "] success.";
     return 0;
   }
 
@@ -333,13 +333,13 @@ class SigmoidModel {
       LOG(ERROR) << "load params sigmoid_w failed.";
       return -1;
     }
-    LOG(WARNING) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
+    VLOG(2) << "load sigmoid_w [" << _sigmoid_w._params[0] << "] ["
                  << _sigmoid_w._params[1] << "].";
     if (0 != _sigmoid_b.init(2, 1, sigmoid_b_file) || 0 != _sigmoid_b.load()) {
       LOG(ERROR) << "load params sigmoid_b failed.";
       return -1;
     }
-    LOG(WARNING) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
+    VLOG(2) << "load sigmoid_b [" << _sigmoid_b._params[0] << "] ["
                  << _sigmoid_b._params[1] << "].";
     _exp_max_input = exp_max;
     _exp_min_input = exp_min;
@@ -491,7 +491,7 @@ class FluidGpuNativeDirWithSigmoidCore : public FluidGpuWithSigmoidCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
  }
 };
@@ -524,7 +524,7 @@ class FluidGpuAnalysisDirWithSigmoidCore : public FluidGpuWithSigmoidCore {
       return -1;
     }
 
-    LOG(WARNING) << "create paddle predictor sucess, path: " << data_path;
+    VLOG(2) << "create paddle predictor sucess, path: " << data_path;
     return 0;
   }
 };
diff --git a/python/examples/fit_a_line/local_train.py b/python/examples/fit_a_line/local_train.py
index 699ad7b67d5bb2a8f8937418a002e19991058473..fae6f72c7ddbd6d0d0e7c6742d1234ae4f6398cb 100644
--- a/python/examples/fit_a_line/local_train.py
+++ b/python/examples/fit_a_line/local_train.py
@@ -32,5 +32,5 @@ for pass_id in range(30):
                            fetch_list=[avg_loss])
 
 serving_io.save_model(
-    "serving_server_model", "serving_client_conf",
-    {"x": x}, {"y": y_predict}, fluid.default_main_program())
+    "uci_housing_model", "uci_housing_client",
+    {"x": x}, {"price": y_predict}, fluid.default_main_program())
diff --git a/python/examples/imdb/local_train.py b/python/examples/imdb/local_train.py
index 9c65dc587ea8fe06753ef09dddaf8fc82f6ca5bf..d3d7c3bae25f2e6b996685eb70301dd3f939526f 100644
--- a/python/examples/imdb/local_train.py
+++ b/python/examples/imdb/local_train.py
@@ -46,18 +46,17 @@ if __name__ == "__main__":
     dataset.set_use_var([data, label])
     pipe_command = "python imdb_reader.py"
     dataset.set_pipe_command(pipe_command)
-    dataset.set_batch_size(4)
+    dataset.set_batch_size(128)
     dataset.set_filelist(filelist)
     dataset.set_thread(10)
 
-    from nets import cnn_net
-    avg_cost, acc, prediction = cnn_net(data, label, dict_dim)
+    from nets import bow_net
+    avg_cost, acc, prediction = bow_net(data, label, dict_dim)
     optimizer = fluid.optimizer.SGD(learning_rate=0.01)
     optimizer.minimize(avg_cost)
 
     exe = fluid.Executor(fluid.CPUPlace())
     exe.run(fluid.default_startup_program())
     epochs = 6
-    save_dirname = "cnn_model"
 
     import paddle_serving_client.io as serving_io
@@ -67,9 +66,5 @@ if __name__ == "__main__":
         logger.info("TRAIN --> pass: {}".format(i))
         if i == 5:
             serving_io.save_model("serving_server_model", "serving_client_conf",
-                                  {"words": data,
-                                   "label": label}, {
-                                      "cost": avg_cost,
-                                      "acc": acc,
-                                      "prediction": prediction
-                                  }, fluid.default_main_program())
+                                  {"words": data}, {"prediction": prediction},
+                                  fluid.default_main_program())