From 7bf00c3a93c4770cb06e8dd17e988e11f79cbf03 Mon Sep 17 00:00:00 2001
From: Jacek Czaja
Date: Tue, 8 May 2018 10:34:01 -0700
Subject: [PATCH] - First draft of reusing softmax mkldnn primitives

- Added hash function inside of MKLDNN softmax op to be used as a handle for
  storing primitives in a context
- Style fixes to softmax mkldnn op
- Fixes after review
- Coding style
- Fix to style
- style fixes
- style fix
- style fixes
- Fix to code style check
- Rephrasing a comment
---
 paddle/fluid/operators/softmax_mkldnn_op.cc | 73 +++++++++++++++------
 1 file changed, 54 insertions(+), 19 deletions(-)

diff --git a/paddle/fluid/operators/softmax_mkldnn_op.cc b/paddle/fluid/operators/softmax_mkldnn_op.cc
index 71b541d98..14b57b11f 100644
--- a/paddle/fluid/operators/softmax_mkldnn_op.cc
+++ b/paddle/fluid/operators/softmax_mkldnn_op.cc
@@ -53,25 +53,60 @@ class SoftmaxMKLDNNKernel : public paddle::framework::OpKernel<T> {
                    "Softmax input and output dimensions should match");
     // Same memory descriptor to be used for input and output
     memory::dims softmax_tz = {src_tz[0], src_tz[1]};
-    // Currently only supports NC data format
-    // TODO(jczaja-intel): support more formats
-    auto softmax_md =
-        MKLDNNMemDesc({softmax_tz}, memory::f32, memory::format::nc);
-    // Normalization is made after innermost dimension eg. C out of NC
-    auto softmax_desc = softmax_forward::desc(prop_kind::forward_scoring,
-                                              softmax_md, 1 /*dim: C*/);
-    // create memory primitives
-    auto softmax_src_memory =
-        memory({softmax_md, mkldnn_engine},
-               static_cast<void*>(const_cast<T*>(input_data)));
-    auto softmax_dst_memory =
-        memory({softmax_md, mkldnn_engine},
-               static_cast<void*>(const_cast<T*>(output_data)));
-    auto softmax_prim_desc =
-        softmax_forward::primitive_desc(softmax_desc, mkldnn_engine);
-    auto softmax = softmax_forward(softmax_prim_desc, softmax_src_memory,
-                                   softmax_dst_memory);
-    std::vector<primitive> pipeline{softmax};
+    // Generate keys for storing/retrieving primitives for this operator
+    // TODO(jczaja): Each MKLDNN operator may have a different hashing function
+    auto gethash = [](memory::dims& operand_dims) {
+      return std::string(std::to_string(operand_dims[0]) + "-" +
+                         std::to_string(operand_dims[1]));
+    };
+    const std::string key = gethash(softmax_tz);
+    const std::string key_softmax_p = key + "@softmax_p";
+    const std::string key_softmax_src_mem_p = key + "@softmax_src_mem_p";
+    const std::string key_softmax_dst_mem_p = key + "@softmax_dst_mem_p";
+
+    std::shared_ptr<void> softmax_p = dev_ctx.GetBlob(key_softmax_p);
+    if (softmax_p == nullptr) {
+      // Currently only NC data format is supported
+      auto softmax_md =
+          MKLDNNMemDesc({softmax_tz}, memory::f32, memory::format::nc);
+      // Normalization is made after innermost dimension, e.g. C out of NC
+      auto softmax_desc = softmax_forward::desc(prop_kind::forward_scoring,
+                                                softmax_md, 1 /*dim: C*/);
+      // create memory primitives
+      auto softmax_src_memory_p = std::make_shared<memory>(
+          memory::primitive_desc{softmax_md, mkldnn_engine},
+          static_cast<void*>(const_cast<T*>(input_data)));
+      dev_ctx.SetBlob(key_softmax_src_mem_p, softmax_src_memory_p);
+      auto softmax_dst_memory_p = std::make_shared<memory>(
+          memory::primitive_desc{softmax_md, mkldnn_engine},
+          static_cast<void*>(output_data));
+      dev_ctx.SetBlob(key_softmax_dst_mem_p, softmax_dst_memory_p);
+
+      auto softmax_forward_pd =
+          std::make_shared<softmax_forward::primitive_desc>(softmax_desc,
+                                                            mkldnn_engine);
+      softmax_p = std::make_shared<softmax_forward>(
+          *(softmax_forward_pd.get()),
+          *(static_cast<memory*>(softmax_src_memory_p.get())),
+          *(static_cast<memory*>(softmax_dst_memory_p.get())));
+      dev_ctx.SetBlob(key_softmax_p, softmax_p);
+    } else {
+      // Primitives already exist
+      auto src_memory_p = std::static_pointer_cast<memory>(
+          dev_ctx.GetBlob(key_softmax_src_mem_p));
+      PADDLE_ENFORCE(src_memory_p != nullptr,
+                     "Fail to find softmax src mem_p in device context");
+      auto dst_memory_p = std::static_pointer_cast<memory>(
+          dev_ctx.GetBlob(key_softmax_dst_mem_p));
+      PADDLE_ENFORCE(dst_memory_p != nullptr,
+                     "Fail to find softmax dst mem_p in device context");
+      src_memory_p->set_data_handle(
+          reinterpret_cast<void*>(const_cast<T*>(input_data)));
+      dst_memory_p->set_data_handle(output_data);
+    }
+
+    std::vector<primitive> pipeline{
+        *(static_cast<softmax_forward*>(softmax_p.get()))};
     stream(stream::kind::eager).submit(pipeline).wait();

     const bool is_test = ctx.Attr<bool>("is_test");
--
GitLab
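
Editor's note: the core of the patch is a primitive cache keyed by the input shape. On the first call for a given NC shape the kernel builds the MKLDNN memories and the softmax primitive and stores them in the device context under keys such as key + "@softmax_p"; on later calls the else branch only swaps the data handles of the cached memories, so the primitive is never rebuilt. The sketch below shows that pattern in isolation. It is plain standard C++, not Paddle or MKL-DNN code; BlobCache and MakeSoftmaxKey are hypothetical stand-ins for the device context's SetBlob/GetBlob and the patch's gethash lambda.

    // Minimal sketch of the string-keyed blob cache the patch relies on.
    // Names are hypothetical; only the standard library is assumed.
    #include <cstdint>
    #include <map>
    #include <memory>
    #include <string>
    #include <vector>

    class BlobCache {
     public:
      // Store any object under a string key (ownership shared with the caller).
      void SetBlob(const std::string& key, std::shared_ptr<void> blob) {
        blobs_[key] = std::move(blob);
      }
      // Return the cached object, or nullptr if this key was never stored.
      std::shared_ptr<void> GetBlob(const std::string& key) const {
        auto it = blobs_.find(key);
        return it == blobs_.end() ? nullptr : it->second;
      }

     private:
      std::map<std::string, std::shared_ptr<void>> blobs_;
    };

    // Same idea as the patch's gethash lambda: the key encodes the N and C
    // dimensions, so every distinct NC shape maps to its own cached primitives.
    std::string MakeSoftmaxKey(const std::vector<std::int64_t>& dims) {
      return std::to_string(dims[0]) + "-" + std::to_string(dims[1]);
    }

Because the cached MKLDNN memories wrap raw pointers to the operator's input and output buffers, the reuse path must call set_data_handle on every invocation (as the patch does); skipping that step would make the cached primitive read and write stale addresses.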