From ff7c0a2322dbce1e29b8fd66b09c85595f795128 Mon Sep 17 00:00:00 2001
From: xiaolil1
Date: Tue, 13 Nov 2018 22:37:51 +0800
Subject: [PATCH] enable key reuse

---
 paddle/fluid/operators/conv_mkldnn_op.cc | 129 ++++++++++++++++++++---
 paddle/fluid/platform/mkldnn_helper.h    | 103 +++++++++++++++++-
 2 files changed, 216 insertions(+), 16 deletions(-)

diff --git a/paddle/fluid/operators/conv_mkldnn_op.cc b/paddle/fluid/operators/conv_mkldnn_op.cc
index 322e6fa2670..e581e23a608 100644
--- a/paddle/fluid/operators/conv_mkldnn_op.cc
+++ b/paddle/fluid/operators/conv_mkldnn_op.cc
@@ -293,6 +293,102 @@ class ConvMKLDNNHandler : public platform::MKLDNNHandler {
       conv_bwd_data_pd_;
 };
 
+struct key_desc {
+  struct Hash {
+    std::size_t operator()(const key_desc& key) const {
+      int input_dim = 0;
+      int weights_dim = 0;
+      int stride_value = 0;
+      int padding_value = 0;
+      int dilation_value = 0;
+      for (size_t i = 0; i < key.input_tz.size(); i++) {
+        input_dim += key.input_tz[i];
+      }
+      for (size_t i = 0; i < key.weights_tz.size(); i++) {
+        weights_dim += key.weights_tz[i];
+      }
+      for (size_t i = 0; i < key.strides.size(); i++) {
+        stride_value += key.strides[i];
+      }
+      for (size_t i = 0; i < key.paddings.size(); i++) {
+        padding_value += key.paddings[i];
+      }
+      for (size_t i = 0; i < key.dilations.size(); i++) {
+        dilation_value += key.dilations[i];
+      }
+      std::hash<int> hasher;
+      return hasher((input_dim << 8)
+                    + (weights_dim << 8 * 2)
+                    + (stride_value << 8 * 3)
+                    + (padding_value << 8)
+                    + (dilation_value << 8 * 2)
+                    + (key.groups << 8 * 3));
+    }
+  };
+
+  std::vector<int> input_tz;
+  std::vector<int> weights_tz;
+  std::vector<int> strides;
+  std::vector<int> paddings;
+  std::vector<int> dilations;
+  int groups;
+  const std::string suffix;
+  key_desc(std::vector<int> input_tz, std::vector<int> weights_tz,
+           std::vector<int> strides, std::vector<int> paddings,
+           std::vector<int> dilations, int groups, const std::string suffix)
+      : input_tz(input_tz), weights_tz(weights_tz), strides(strides),
+        paddings(paddings), dilations(dilations), groups(groups),
+        suffix(suffix) {}
+
+  bool operator==(const key_desc o) const {
+    for (size_t i = 0; i < input_tz.size(); i++) {
+      if (input_tz[i] != o.input_tz[i]) return false;
+    }
+    for (size_t i = 0; i < weights_tz.size(); i++) {
+      if (weights_tz[i] != o.weights_tz[i]) return false;
+    }
+    for (size_t i = 0; i < strides.size(); i++) {
+      if (strides[i] != o.strides[i]) return false;
+    }
+    for (size_t i = 0; i < paddings.size(); i++) {
+      if (paddings[i] != o.paddings[i]) return false;
+    }
+    for (size_t i = 0; i < dilations.size(); i++) {
+      if (dilations[i] != o.dilations[i]) return false;
+    }
+    return groups == o.groups && suffix == o.suffix;
+  }
+};
+
+struct handle_key {
+  void SetKeyMap(
+      std::unordered_map<key_desc, std::string, key_desc::Hash>& key_map,
+      key_desc key_dsr, std::string key) {
+    auto it = key_map.find(key_dsr);
+    if (it == key_map.end()) {
+      key_map[key_dsr] = key;  // create new blob
+    } else {
+      (*it).second = key;  // set data to existing blob
+    }
+    return;
+  }
+
+  std::string GetKeyMap(
+      std::unordered_map<key_desc, std::string, key_desc::Hash>& key_map,
+      key_desc key_dsr) {
+    auto it = key_map.find(key_dsr);
+    if (it != key_map.end()) {
+      return (*it).second;
+    }
+    return "";
+  }
+};
+
 template <typename T>
 class ConvMKLDNNOpKernel : public paddle::framework::OpKernel<T> {
  public:
@@ -353,7 +449,7 @@ class ConvMKLDNNOpKernel : public paddle::framework::OpKernel<T> {
     const float* filter_data = filter->data<float>();
 
     std::vector<int> src_tz = paddle::framework::vectorize2int(input->dims());
-    std::vector<int> weights_tz =
+    std::vector<int> weights_tz =
         paddle::framework::vectorize2int(filter->dims());
     int g = std::max(groups, 1);
     if (g > 1) {
@@ -371,20 +467,28 @@ class ConvMKLDNNOpKernel : public paddle::framework::OpKernel<T> {
     std::vector<int> dst_tz = paddle::framework::vectorize2int(output->dims());
 
     // Get unique name for storing MKLDNN primitives
-    const std::string key = ConvMKLDNNHandler::GetHash(
-        src_tz, weights_tz, strides, paddings, dilations, groups,
-        ctx.op().Output("Output"));
+    handle_key keyhandler;
+    key_desc key_dsr = {src_tz,    weights_tz, strides, paddings,
+                        dilations, groups,     ctx.op().Output("Output")};
+
+    static std::unordered_map<key_desc, std::string, key_desc::Hash> key_map;
+    static std::shared_ptr<std::unordered_map<
+        platform::MKLDNNHandler::key_suffix_desc, std::string,
+        platform::MKLDNNHandler::key_suffix_desc::Hash>>
+        key_suffix_map(new std::unordered_map<
+            platform::MKLDNNHandler::key_suffix_desc, std::string,
+            platform::MKLDNNHandler::key_suffix_desc::Hash>({}));
+    bool key_reuse = true;
+    std::string none_key = "";
+    if (keyhandler.GetKeyMap(key_map, key_dsr) == none_key) {
+      key_reuse = false;
+    }
+    std::string key;
+    if (!key_reuse) {
+      key = ConvMKLDNNHandler::GetHash(
+          src_tz, weights_tz, strides, paddings, dilations, groups,
+          ctx.op().Output("Output"));
+      keyhandler.SetKeyMap(key_map, key_dsr, key);
+    } else {
+      key = keyhandler.GetKeyMap(key_map, key_dsr);
+    }
     const std::string key_conv_pd = key + "@conv_pd";
     static std::unordered_map<std::string, std::vector<float>>
         scale_map;
-    //scale_map.insert({key_conv_pd,{1.0f}});
-    //scale_map[key_conv_pd]={0.1f};
     bool scale_reuse = true;
-    //auto scale_in_key = key + "@scale_in";
-    //auto scale_weights_key = key + "@scale_weights";
-    //auto scale_out_key = key + "@scale_out";
-    //auto output_shift_scale_key = key + "@output_shift_scale";
-    //auto sum_scale_key = key + "@sum_scale";
-    //auto scale_in_eltwise_key = key + "@scale_in_eltwise";
     std::vector<float> scale_in_data;
     std::vector<float> scale_out_data;
     std::vector<float> scale_weights_data;
@@ -610,6 +714,7 @@ class ConvMKLDNNOpKernel : public paddle::framework::OpKernel<T> {
       dev_ctx.SetBlob(key_conv_pd, conv_pd);
 
       ConvMKLDNNHandler handler(conv_pd, dev_ctx, mkldnn_engine, key);
+      handler.key_suffix_map_ = key_suffix_map;
 
       // create mkldnn memory from input tensors (data/weights)
       auto user_src_memory_p =
diff --git a/paddle/fluid/platform/mkldnn_helper.h b/paddle/fluid/platform/mkldnn_helper.h
index 327c0bbd903..ce03767f676 100644
--- a/paddle/fluid/platform/mkldnn_helper.h
+++ b/paddle/fluid/platform/mkldnn_helper.h
@@ -115,6 +115,28 @@ class MKLDNNHandler {
         key_(base_key),
         is_reusing_(false) {}
 
+  struct key_suffix_desc {
+    struct Hash {
+      std::size_t operator()(const key_suffix_desc& dsr) const {
+        int a = std::atoi(dsr.key.c_str());
+        int b = std::atoi(dsr.suffix.c_str());
+        std::hash<int> hasher;
+        return hasher((a << 8)
+                      + (b << 8 * 2));
+      }
+    };
+
+    std::string key;
+    std::string suffix;
+
+    key_suffix_desc(std::string key, std::string suffix)
+        : key(key), suffix(suffix) {}
+
+    bool operator==(const key_suffix_desc o) const {
+      return (key == o.key && suffix == o.suffix);
+    }
+    bool operator!=(const key_suffix_desc& o) const { return !(*this == o); }
+  };
+
   std::shared_ptr<mkldnn::memory> AcquireSrcMemory(
       const mkldnn::memory::desc& md, void* ptr) {
     return this->AcquireMemory(md, ptr, "@user_src_mem_p");
   }
@@ -148,7 +170,20 @@ class MKLDNNHandler {
   std::shared_ptr<mkldnn::memory> AcquireMemoryFromPrimitive(
       mkldnn::memory::primitive_desc mdp, void* ptr,
       const std::string& suffix) {
-    auto local_key = key_ + suffix;
+    std::string local_key;
+    if (key_suffix_map_) {
+      key_suffix_desc dsr = {key_, suffix};
+      if (GetKeySuffixMap(key_suffix_map_, dsr) == "") {
+        // std::cout << "create key!!!!!!!" << std::endl;
+        local_key = key_ + suffix;
+        SetKeySuffixMap(key_suffix_map_, dsr, local_key);
+      } else {
+        local_key = GetKeySuffixMap(key_suffix_map_, dsr);
+      }
+    } else {
+      local_key = key_ + suffix;
+    }
     auto mem_p =
         std::static_pointer_cast<mkldnn::memory>(dev_ctx_.GetBlob(local_key));
     PADDLE_ENFORCE((mem_p != nullptr) || (is_reusing_ == false),
@@ -170,7 +205,20 @@ class MKLDNNHandler {
       void* ptr, const std::string& suffix) {
     /*Generate key*/
-    auto local_key = key_ + suffix;
+    std::string local_key;
+    if (key_suffix_map_) {
+      key_suffix_desc dsr = {key_, suffix};
+      if (GetKeySuffixMap(key_suffix_map_, dsr) == "") {
+        // std::cout << "create key!!!!!!!" << std::endl;
+        local_key = key_ + suffix;
+        SetKeySuffixMap(key_suffix_map_, dsr, local_key);
+      } else {
+        local_key = GetKeySuffixMap(key_suffix_map_, dsr);
+      }
+    } else {
+      local_key = key_ + suffix;
+    }
     auto mem_p =
         std::static_pointer_cast<mkldnn::memory>(dev_ctx_.GetBlob(local_key));
     PADDLE_ENFORCE((mem_p != nullptr) || (is_reusing_ == false),
@@ -193,7 +241,21 @@
       const std::shared_ptr<mkldnn::memory>& target_memory_p,
       const std::string& suffix,
       std::vector<mkldnn::primitive>& pipeline) {  // NOLINT
-    auto local_key = key_ + suffix;
+
+    std::string local_key;
+    if (key_suffix_map_) {
+      key_suffix_desc dsr = {key_, suffix};
+      if (GetKeySuffixMap(key_suffix_map_, dsr) == "") {
+        // std::cout << "create key!!!!!!!" << std::endl;
+        local_key = key_ + suffix;
+        SetKeySuffixMap(key_suffix_map_, dsr, local_key);
+      } else {
+        local_key = GetKeySuffixMap(key_suffix_map_, dsr);
+      }
+    } else {
+      local_key = key_ + suffix;
+    }
     auto local_memory_p = std::static_pointer_cast<mkldnn::memory>(
@@ -222,7 +284,20 @@
       std::vector<float> scale_data = {1.0f},
       int mask = 0) {
     // create reorder primitive if the input format is not the preferred one
-    auto local_key = key_ + suffix;
+    std::string local_key;
+    if (key_suffix_map_) {
+      key_suffix_desc dsr = {key_, suffix};
+      if (GetKeySuffixMap(key_suffix_map_, dsr) == "") {
+        // std::cout << "create key!!!!!!!" << std::endl;
+        local_key = key_ + suffix;
+        SetKeySuffixMap(key_suffix_map_, dsr, local_key);
+      } else {
+        local_key = GetKeySuffixMap(key_suffix_map_, dsr);
+      }
+    } else {
+      local_key = key_ + suffix;
+    }
     auto key_reorder_p = key_ + suffix + "reorder_p";
 
     auto target_memory_p =
@@ ... @@ class MKLDNNHandler {
+  void SetKeySuffixMap(
+      std::shared_ptr<std::unordered_map<key_suffix_desc, std::string,
+                                         key_suffix_desc::Hash>>
+          key_suffix_map,
+      key_suffix_desc key_suffix_dsr, std::string key) {
+    auto it = (*key_suffix_map).find(key_suffix_dsr);
+    if (it == (*key_suffix_map).end()) {
+      (*key_suffix_map)[key_suffix_dsr] = key;  // create new blob
+    } else {
+      (*it).second = key;  // set data to existing blob
+    }
+    return;
+  }
+
+  std::string GetKeySuffixMap(
+      std::shared_ptr<std::unordered_map<key_suffix_desc, std::string,
+                                         key_suffix_desc::Hash>>
+          key_suffix_map,
+      key_suffix_desc key_suffix_dsr) {
+    auto it = (*key_suffix_map).find(key_suffix_dsr);
+    if (it != (*key_suffix_map).end()) {
+      return (*it).second;
+    }
+    return "";
+  }
+
+  std::shared_ptr<std::unordered_map<key_suffix_desc, std::string,
+                                     key_suffix_desc::Hash>>
+      key_suffix_map_;
+
  protected:
   static std::string dims2str(const mkldnn::memory::dims& operand_dims) {
     std::string dstr = "";
-- 
GitLab
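For readers skimming the patch, the mechanism it adds is a memo table: the convolution's shapes, strides, paddings, dilations, groups and output name are bundled into a hashable descriptor, and the string key produced by ConvMKLDNNHandler::GetHash (plus the per-suffix blob keys in MKLDNNHandler) is built only once per distinct descriptor and reused afterwards. The sketch below is a minimal, self-contained illustration of that pattern, not the PaddlePaddle code itself; ConvParams, ConvParamsHash, MakeKey and GetOrCreateKey are hypothetical names, and the hasher uses a hash-combine mix instead of the shifted-sum hash in the patch.

// Standalone sketch of the key-reuse idea (hypothetical names, not Paddle code):
// cache an expensive-to-build string key, indexed by the parameters that determine it.
#include <iostream>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

struct ConvParams {
  std::vector<int> input_tz, weights_tz, strides, paddings, dilations;
  int groups;
  std::string suffix;

  bool operator==(const ConvParams& o) const {
    return input_tz == o.input_tz && weights_tz == o.weights_tz &&
           strides == o.strides && paddings == o.paddings &&
           dilations == o.dilations && groups == o.groups && suffix == o.suffix;
  }
};

struct ConvParamsHash {
  // boost::hash_combine-style mixing to reduce collisions between shapes.
  static void Combine(std::size_t& seed, std::size_t v) {
    seed ^= v + 0x9e3779b97f4a7c15ULL + (seed << 6) + (seed >> 2);
  }
  std::size_t operator()(const ConvParams& p) const {
    std::size_t seed = 0;
    auto mix_vec = [&seed](const std::vector<int>& v) {
      for (int x : v) Combine(seed, std::hash<int>()(x));
    };
    mix_vec(p.input_tz);
    mix_vec(p.weights_tz);
    mix_vec(p.strides);
    mix_vec(p.paddings);
    mix_vec(p.dilations);
    Combine(seed, std::hash<int>()(p.groups));
    Combine(seed, std::hash<std::string>()(p.suffix));
    return seed;
  }
};

// Stand-in for ConvMKLDNNHandler::GetHash: concatenating every dimension into
// one long string is the step the cache is meant to run only once.
std::string MakeKey(const ConvParams& p) {
  std::ostringstream os;
  for (int x : p.input_tz) os << x << "-";
  for (int x : p.weights_tz) os << x << "-";
  for (int x : p.strides) os << x << "-";
  for (int x : p.paddings) os << x << "-";
  for (int x : p.dilations) os << x << "-";
  os << p.groups << "-" << p.suffix;
  return os.str();
}

// Return the cached key if these parameters were seen before,
// otherwise build the key once and store it.
const std::string& GetOrCreateKey(
    std::unordered_map<ConvParams, std::string, ConvParamsHash>& cache,
    const ConvParams& p) {
  auto it = cache.find(p);
  if (it == cache.end()) {
    it = cache.emplace(p, MakeKey(p)).first;  // create new entry
  }
  return it->second;  // reuse existing entry
}

int main() {
  std::unordered_map<ConvParams, std::string, ConvParamsHash> cache;
  ConvParams p{{1, 3, 224, 224}, {64, 3, 3, 3}, {1, 1}, {1, 1}, {1, 1}, 1, "conv1"};
  const std::string& k1 = GetOrCreateKey(cache, p);  // built and cached
  const std::string& k2 = GetOrCreateKey(cache, p);  // served from the cache
  std::cout << k1 << "\n" << (&k1 == &k2 ? "reused" : "rebuilt") << "\n";
  return 0;
}

Relying on a full operator== for the final comparison is what keeps such a cache correct even when two different configurations collide in the hash; the patch's key_desc follows the same division of labour between its Hash functor and operator==.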