Commit 3a62e888 authored by wangguibao

Merge branch 'develop' into binary_inference_lib

......@@ -18,7 +18,12 @@
#include <string>
#include <vector>
#ifdef BCLOUD
#include "baidu/rpc/server.h"
#else
#include "brpc/server.h"
#endif
#include "cube/cube-api/cube.pb.h"
#include "cube/cube-api/include/meta.h"
......
......@@ -19,11 +19,25 @@
#include <unordered_map>
#include <vector>
#ifdef BCLOUD
#include "baidu/rpc/channel.h"
#include "baidu/rpc/parallel_channel.h"
#include "rapidjson/document.h"
#else
#include "brpc/channel.h"
#include "brpc/parallel_channel.h"
#include "butil/third_party/rapidjson/document.h"
#endif
#include "bvar/bvar.h"
#ifdef BCLOUD
namespace brpc = baidu::rpc;
#ifndef BUTIL_RAPIDJSON_NAMESPACE
#define BUTIL_RAPIDJSON_NAMESPACE RAPIDJSON_NAMESPACE
#endif
#endif
namespace rec {
namespace mcube {
......
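The two hunks above make the cube client buildable both inside Baidu (BCLOUD, internal baidu/rpc headers) and in the open-source tree (brpc headers), with `namespace brpc = baidu::rpc;` papering over the namespace difference. A minimal sketch of the effect, assuming only the standard brpc `Channel`/`ChannelOptions` API; the endpoint string and timeout value are illustrative, not from this commit:

```cpp
#ifdef BCLOUD
#include "baidu/rpc/channel.h"
namespace brpc = baidu::rpc;   // same alias as in the hunk above
#else
#include "brpc/channel.h"
#endif

// Code below compiles unchanged in either build environment.
int ConnectToCube(const char* endpoint) {
  brpc::ChannelOptions options;
  options.timeout_ms = 200;          // illustrative value
  brpc::Channel channel;
  return channel.Init(endpoint, &options);  // returns 0 on success
}
```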
......@@ -13,8 +13,14 @@
// limitations under the License.
#include "cube/cube-api/include/cube_api.h"
#ifdef BCLOUD
#include <baidu/rpc/channel.h>
#include <baidu/rpc/parallel_channel.h>
#else
#include <brpc/channel.h>
#include <brpc/parallel_channel.h>
#endif
#include <google/protobuf/descriptor.h>
#include "cube/cube-api/include/cube_api_bvar.h"
......@@ -25,6 +31,10 @@ namespace {
static ::rec::mcube::CubeAPI* g_ins = NULL;
}
#ifdef BCLOUD
namespace brpc = baidu::rpc;
#endif
namespace rec {
namespace mcube {
......
......@@ -26,6 +26,10 @@ namespace {
static ::rec::mcube::Meta* g_ins = NULL;
}
#ifdef BCLOUD
namespace brpc = baidu::rpc;
#endif
namespace rec {
namespace mcube {
......
......@@ -20,11 +20,6 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
include_directories(SYSTEM ${CMAKE_CURRENT_LIST_DIR}/include)
include_directories(SYSTEM ${CMAKE_CURRENT_BINARY_DIR}/../)
find_library(CURL_LIB NAMES curl)
if (NOT CURL_LIB)
message(FATAL_ERROR "Fail to find curl")
endif()
add_executable(cube-builder src/main.cpp include/cube-builder/util.h src/util.cpp src/builder_job.cpp include/cube-builder/builder_job.h include/cube-builder/define.h src/seqfile_reader.cpp include/cube-builder/seqfile_reader.h include/cube-builder/raw_reader.h include/cube-builder/vtext.h src/crovl_builder_increment.cpp include/cube-builder/crovl_builder_increment.h src/curl_simple.cpp include/cube-builder/curl_simple.h)
add_dependencies(cube-builder jsoncpp)
......@@ -33,6 +28,7 @@ set(DYNAMIC_LIB
gflags
jsoncpp
brpc
-lcurl
-lssl
-lcrypto
${CURL_LIB}
......
--port=8027
--port=8000
--dict_split=1
--in_mem=true
--log_dir=./log/
\ No newline at end of file
--log_dir=./log/
......@@ -320,7 +320,7 @@ def prune_program():
### 2.5 Chaining the pruning steps together
We provide save_program.py, a complete script for pruning the CTR prediction model, released together with the [CTR distributed training task](doc/DISTRIBUTED_TRAINING_AND_SERVING.md); it can be found in the training-script directory of the trainer and pserver containers
We provide save_program.py, a complete script for pruning the CTR prediction model, released together with the [CTR distributed training and Serving pipeline deployment](https://github.com/PaddlePaddle/Serving/blob/master/doc/DEPLOY.md); it can be found in the training-script directory of the trainer and pserver containers, and can also be downloaded [here](https://github.com/PaddlePaddle/Serving/tree/master/doc/resource).
## 3. The complete inference computation flow
......
......@@ -10,7 +10,9 @@ C++ compiler (verified versions: GCC 4.8.2/5.4.0)
python (verified version: 2.7)
Go compiler (verified version: 1.9.2)
Go compiler (verified versions: 1.9.2/1.12.0)
openssl & openssl-devel
## Compilation
......
......@@ -2,7 +2,7 @@ FILE(GLOB fluid_cpu_engine_srcs ${CMAKE_CURRENT_LIST_DIR}/src/*.cpp)
add_library(fluid_cpu_engine ${fluid_cpu_engine_srcs})
target_include_directories(fluid_cpu_engine PUBLIC
${CMAKE_BINARY_DIR}/Paddle/fluid_install_dir/)
add_dependencies(fluid_cpu_engine pdserving extern_paddle configure)
add_dependencies(fluid_cpu_engine pdserving extern_paddle configure kvdb)
target_link_libraries(fluid_cpu_engine pdserving paddle_fluid iomp5 mklml_intel -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
install(TARGETS fluid_cpu_engine
......
......@@ -2,7 +2,7 @@ FILE(GLOB fluid_gpu_engine_srcs ${CMAKE_CURRENT_LIST_DIR}/src/*.cpp)
add_library(fluid_gpu_engine ${fluid_gpu_engine_srcs})
target_include_directories(fluid_gpu_engine PUBLIC
${CMAKE_BINARY_DIR}/Paddle/fluid_install_dir/)
add_dependencies(fluid_gpu_engine pdserving extern_paddle configure)
add_dependencies(fluid_gpu_engine pdserving extern_paddle configure kvdb)
target_link_libraries(fluid_gpu_engine pdserving paddle_fluid iomp5 mklml_intel -lpthread -lcrypto -lm -lrt -lssl -ldl -lz)
install(TARGETS fluid_gpu_engine
......
......@@ -9,6 +9,7 @@ set(SRC_LIST ${CMAKE_CURRENT_LIST_DIR}/src/test_rocksdb.cpp
${CMAKE_CURRENT_LIST_DIR}/src/gtest_kvdb.cpp)
add_library(kvdb ${SRC_LIST})
add_dependencies(kvdb rocksdb)
install(TARGETS kvdb ARCHIVE DESTINATION ${PADDLE_SERVING_INSTALL_DIR}/lib/)
add_executable(kvdb_test ${SRC_LIST})
......
......@@ -45,6 +45,7 @@ class AbstractKVDB {
virtual void SetDBName(std::string) = 0;
virtual void Set(std::string key, std::string value) = 0;
virtual std::string Get(std::string key) = 0;
virtual void Close() = 0;
virtual ~AbstractKVDB() = 0;
};
......
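The new pure-virtual `Close()` extends the KVDB interface so callers can release the underlying store explicitly rather than relying on destruction. A minimal usage sketch against just the methods shown above; construction is omitted because the factory path is not part of this hunk:

```cpp
#include <memory>
#include <string>

void ExerciseKVDB(const std::shared_ptr<AbstractKVDB>& kvdb) {
  kvdb->SetDBName("SparseMatrix");   // default name used by RocksDBWrapperFactory below
  kvdb->Set("key", "value");
  std::string value = kvdb->Get("key");
  kvdb->Close();                     // new in this change: explicit shutdown
}
```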
......@@ -32,7 +32,7 @@ class RocksDBWrapper {
void SetDBName(std::string db_name);
static std::shared_ptr<RocksDBWrapper> RocksDBWrapperFactory(
std::string db_name = "SparseMatrix");
void Close();
private:
rocksdb::DB *db_;
std::string db_name_;
......
......@@ -21,6 +21,7 @@ class RocksKVDB : public AbstractKVDB {
void SetDBName(std::string);
void Set(std::string key, std::string value);
std::string Get(std::string key);
void Close();
~RocksKVDB();
private:
......
......@@ -54,6 +54,7 @@ TEST_F(KVDBTest, AbstractKVDB_Unit_Test) {
std::string val = kvdb->Get(std::to_string(i));
ASSERT_EQ(val, std::to_string(i * 2));
}
kvdb->Close();
}
TEST_F(KVDBTest, FileReader_Unit_Test) {
......@@ -82,43 +83,6 @@ TEST_F(KVDBTest, FileReader_Unit_Test) {
ASSERT_NE(timestamp_2, timestamp_3);
}
#include <cmath>
TEST_F(KVDBTest, ParamDict_Unit_Test) {
std::string test_in_filename = "abs_dict_reader_test_in.txt";
param_dict->SetFileReaderLst({test_in_filename});
param_dict->SetReader([](std::string text) {
auto split = [](const std::string& s,
std::vector<std::string>& sv,
const char* delim = " ") {
sv.clear();
char* buffer = new char[s.size() + 1];
std::copy(s.begin(), s.end(), buffer);
char* p = strtok(buffer, delim);
do {
sv.push_back(p);
} while ((p = strtok(NULL, delim)));
return;
};
std::vector<std::string> text_split;
split(text, text_split, " ");
std::string key = text_split[0];
text_split.erase(text_split.begin());
return make_pair(key, text_split);
});
param_dict->CreateKVDB();
GenerateTestIn(test_in_filename);
param_dict->UpdateBaseModel();
std::this_thread::sleep_for(std::chrono::seconds(2));
std::vector<float> test_vec = param_dict->GetSparseValue("1", "");
ASSERT_LT(fabs(test_vec[0] - 1.0), 1e-2);
UpdateTestIn(test_in_filename);
param_dict->UpdateDeltaModel();
}
void GenerateTestIn(std::string filename) {
std::ifstream in_file(filename);
if (in_file.good()) {
......
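The removed ParamDict_Unit_Test above tokenized each line with a strtok lambda that never null-terminates (or frees) its heap buffer. For reference, a safer whitespace splitter the reader callback could use instead; this is an illustration, not part of the commit:

```cpp
#include <sstream>
#include <string>
#include <vector>

// Whitespace-delimited split without strtok: no manual buffer, no leak.
static void Split(const std::string& text, std::vector<std::string>* out) {
  out->clear();
  std::istringstream iss(text);
  std::string token;
  while (iss >> token) {
    out->push_back(token);
  }
}
```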
......@@ -140,4 +140,5 @@ void ParamDict::CreateKVDB() {
this->back_db->CreateDB();
}
ParamDict::~ParamDict() {}
ParamDict::~ParamDict() {
}
......@@ -48,6 +48,14 @@ void RocksDBWrapper::SetDBName(std::string db_name) {
this->db_name_ = db_name;
}
void RocksDBWrapper::Close() {
if (db_ != nullptr) {
db_->Close();
delete(db_);
db_ = nullptr;
}
}
std::shared_ptr<RocksDBWrapper> RocksDBWrapper::RocksDBWrapperFactory(
std::string db_name) {
return std::make_shared<RocksDBWrapper>(db_name);
......
......@@ -32,6 +32,12 @@ void RocksKVDB::Set(std::string key, std::string value) {
return;
}
void RocksKVDB::Close() {
this->db_->Close();
}
std::string RocksKVDB::Get(std::string key) { return this->db_->Get(key); }
RocksKVDB::~RocksKVDB() {}
RocksKVDB::~RocksKVDB() {
this->db_->Close();
}
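With this change the store is closed both explicitly via `RocksKVDB::Close()` and again from the destructor. Assuming `db_` here is the `RocksDBWrapper` handle shown earlier, the second call is harmless because `RocksDBWrapper::Close()` nulls its pointer after the first call. A sketch of the intended shutdown order, with a hypothetical caller:

```cpp
void Shutdown(RocksKVDB* kvdb) {
  kvdb->Close();   // releases the underlying rocksdb::DB via the wrapper's Close()
  delete kvdb;     // ~RocksKVDB() calls Close() again; the nullptr guard makes it a no-op
}
```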