diff --git a/mindspore/ccsrc/ir/pattern_matcher.h b/mindspore/ccsrc/ir/pattern_matcher.h
index 97a546fad504c025224fd06cdbef1b8c76dbc637..64703a22d07ee276c920e611f9d1cc700a51361e 100644
--- a/mindspore/ccsrc/ir/pattern_matcher.h
+++ b/mindspore/ccsrc/ir/pattern_matcher.h
@@ -541,6 +541,9 @@ class PConstant : public PBase<PConstant<T> > {
       data_out[i] *= data_2[0];
     }
   } else {
+    if (in_data_2_size < out_data_size) {
+      MS_EXCEPTION(ValueError) << "in_data_2_size is smaller than out_data_size.";
+    }
     for (int i = 0; i < out_data_size; i++) {
       data_out[i] *= data_2[i];
     }
@@ -595,33 +598,41 @@ class PConstant : public PBase<PConstant<T> > {
       return nullptr;
     }
 
-    void *data_out;
+    auto new_tensor_ptr = std::make_shared<tensor::Tensor>(tensor_3_type_ptr->type_id(), tensor_out_shape);
+    size_t mem_size = GetTypeByte(tensor_3_type_ptr) * IntToSize(new_tensor_ptr->ElementsNum());
+    char *data = reinterpret_cast<char *>(new_tensor_ptr->data_c());
+    int ret = 0;
+    void *data_out = nullptr;
     if ((tensor_3_type_ptr->type_id() == TypeId::kNumberTypeFloat32) ||
         (tensor_3_type_ptr->type_id() == TypeId::kNumberTypeFloat)) {
       Multiply<float>(tensor_ptr_1->data_c(), tensor_ptr_1->DataSize(), tensor_ptr_2->data_c(),
                       tensor_ptr_2->DataSize(), &data_out, data_out_size);
+      ret = memcpy_s(data, mem_size, data_out, mem_size);
+      delete[] reinterpret_cast<float *>(data_out);
     } else {
       if (tensor_3_type_ptr->type_id() == TypeId::kNumberTypeFloat64) {
         Multiply<double>(tensor_ptr_1->data_c(), tensor_ptr_1->DataSize(), tensor_ptr_2->data_c(),
                          tensor_ptr_2->DataSize(), &data_out, data_out_size);
+        ret = memcpy_s(data, mem_size, data_out, mem_size);
+        delete[] reinterpret_cast<double *>(data_out);
       } else {
         if ((tensor_3_type_ptr->type_id() == TypeId::kNumberTypeInt32) ||
             (tensor_3_type_ptr->type_id() == TypeId::kNumberTypeInt)) {
           Multiply<int>(tensor_ptr_1->data_c(), tensor_ptr_1->DataSize(), tensor_ptr_2->data_c(),
                         tensor_ptr_2->DataSize(), &data_out, data_out_size);
+          ret = memcpy_s(data, mem_size, data_out, mem_size);
+          delete[] reinterpret_cast<int *>(data_out);
         } else {
           // Un-support data types
           return nullptr;
         }
       }
     }
-
-    auto new_tensor_ptr = std::make_shared<tensor::Tensor>(tensor_3_type_ptr->type_id(), tensor_out_shape);
-    size_t mem_size = GetTypeByte(tensor_3_type_ptr) * IntToSize(new_tensor_ptr->ElementsNum());
-    char *data = reinterpret_cast<char *>(new_tensor_ptr->data_c());
-    memcpy(data, data_out, mem_size);
-
+    if (ret != 0) {
+      MS_LOG(EXCEPTION) << "memcpy_s error, errno " << ret << ", source size " << mem_size << ", dest size "
+                        << new_tensor_ptr->DataSize();
+    }
     auto new_vnode = NewValueNode(new_tensor_ptr);
     new_vnode->set_abstract(new_tensor_ptr->ToAbstract());
     return new_vnode;
diff --git a/mindspore/ccsrc/ir/tensor.cc b/mindspore/ccsrc/ir/tensor.cc
index 8213bb689ce7e9d1e9579b8d2e2b2ecfee75fbfb..093a39db4774e5b9210ebed8d341ad9a2db0f960 100644
--- a/mindspore/ccsrc/ir/tensor.cc
+++ b/mindspore/ccsrc/ir/tensor.cc
@@ -125,6 +125,7 @@ template <typename T>
 class TensorDataImpl : public TensorData {
  public:
   explicit TensorDataImpl(const std::vector<int> &shape) : ndim_(shape.size()), data_size_(SizeOf(shape)) {}
+  ~TensorDataImpl() = default;
 
   TensorDataImpl(const std::vector<int> &shape, void *data, size_t data_len)
       : ndim_(shape.size()), data_size_(SizeOf(shape)), data_(CopyData(shape, data, data_len)) {}
@@ -288,7 +289,7 @@ class TensorDataImpl : public TensorData {
 };
 
 template <typename... Args>
-TensorDataPtr MakeTensorData(TypeId data_type, const std::vector<int> &shape, Args... args) {
+TensorDataPtr MakeTensorData(TypeId data_type, const std::vector<int> &shape, const Args... args) {
   switch (data_type) {
     case kNumberTypeBool:
     case kNumberTypeUInt8:
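The pattern_matcher.h rewrite above hoists the output-tensor allocation ahead of the type dispatch so that every Multiply<T> branch can copy its result with a bounds-checked memcpy_s, release the scratch buffer with a matching typed delete[], and report a failed copy instead of silently ignoring it. A minimal standalone sketch of that checked copy-and-free pattern, assuming no Annex K memcpy_s is available (checked_memcpy below is a stand-in with the same zero-on-success contract, not a MindSpore API):

// checked_copy.cc - illustrative only.
#include <cstddef>
#include <cstring>
#include <iostream>
#include <vector>

// Stand-in for memcpy_s: returns 0 on success, non-zero when the copy would
// overrun the destination, mirroring the patched call sites.
int checked_memcpy(void *dest, std::size_t dest_size, const void *src, std::size_t count) {
  if (dest == nullptr || src == nullptr || count > dest_size) {
    return 1;
  }
  std::memcpy(dest, src, count);
  return 0;
}

int main() {
  std::vector<float> out(4);  // plays the role of new_tensor_ptr's storage
  // Scratch buffer, like the one Multiply<float> allocates with new[].
  float *data_out = new float[4]{1.0f, 2.0f, 3.0f, 4.0f};
  int ret = checked_memcpy(out.data(), out.size() * sizeof(float), data_out, 4 * sizeof(float));
  delete[] data_out;  // freed on every branch, as the patch now does
  if (ret != 0) {
    std::cerr << "copy failed, error " << ret << "\n";
    return 1;
  }
  std::cout << out[2] << "\n";  // prints 3
  return 0;
}

Built with g++ -std=c++11 checked_copy.cc, this prints 3; shrinking out makes checked_memcpy return non-zero instead of overrunning the destination, which is exactly the case the new ret != 0 check in the patch turns into an exception.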
diff --git a/mindspore/ccsrc/optimizer/ad/dfunctor.cc b/mindspore/ccsrc/optimizer/ad/dfunctor.cc
index f9c056a84ef49161080352f788f9c6e991c9571f..308f1dd352bef9110395f416d816305cd9e70f29 100644
--- a/mindspore/ccsrc/optimizer/ad/dfunctor.cc
+++ b/mindspore/ccsrc/optimizer/ad/dfunctor.cc
@@ -99,14 +99,14 @@ void DFunctor::BackPropagateFv(const AnfNodePtr &fv, const AnfNodePtr &din) {
       fv_adjoint = anfnode_to_adjoin_indirect_fv_.find(fv);
     }
   }
-  auto key = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint->second->k()});
-  fv_adjoint->second->RegisterKUser(key, 1);
+  auto node = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint->second->k()});
+  fv_adjoint->second->RegisterKUser(node, 1);
   auto default_val = tape_->NewCNode({NewValueNode(prim::GetPythonOps("zeros_like")), fv_adjoint->second->k()});
   fv_adjoint->second->RegisterKUser(default_val, 1);
-  auto dfv = tape_->NewCNode({NewValueNode(prim::kPrimEnvGetItem), din, key, default_val});
+  auto dfv = tape_->NewCNode({NewValueNode(prim::kPrimEnvGetItem), din, node, default_val});
   MS_LOG(DEBUG) << "BackPropagateFv find adjoint in anfnode_to_adjoin_ or anfnode_to_adjoin_indirect_fv_ fv "
                 << fv->func_graph()->ToString() << " " << fv->ToString() << ".";
-  MS_LOG(DEBUG) << "BackPropagateFv get item from " << din->ToString() << " key " << key->ToString() << ".";
+  MS_LOG(DEBUG) << "BackPropagateFv get item from " << din->ToString() << " key " << node->ToString() << ".";
   fv_adjoint->second->AccumulateDout(dfv);
 }
 
@@ -279,13 +279,13 @@ AnfNodePtr DFunctor::AttachFvDoutToTape(const AnfNodePtr &grad_fv) {
     if (fv_adjoint == anfnode_to_adjoin_.end()) {
       MS_LOG(EXCEPTION) << "AttachFvDoutToTape fv adjoint does not exist " << fv->ToString() << ".";
     }
-    auto key = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint->second->k()});
-    fv_adjoint->second->RegisterKUser(key, 1);
+    auto node = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint->second->k()});
+    fv_adjoint->second->RegisterKUser(node, 1);
     auto sens = fv_adjoint->second->dout();
     new_grad_fv = tape_->NewCNode({
       NewValueNode(prim::kPrimEnvSetItem),
       new_grad_fv,
-      key,
+      node,
       sens,
     });
     fv_adjoint->second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), 3);
@@ -301,13 +301,13 @@ AnfNodePtr DFunctor::AttachIndirectFvDoutToTape(const AnfNodePtr &grad_fv) {
   for (auto &fv_adjoint : anfnode_to_adjoin_indirect_fv_) {
     MS_LOG(DEBUG) << "AttachIndirectFvDoutToTape backprop indirect fv " << fv_adjoint.first->ToString() << " "
                   << primal_graph_->ToString() << ".";
-    auto key = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint.second->k()});
-    fv_adjoint.second->RegisterKUser(key, 1);
+    auto node = tape_->NewCNode({NewValueNode(prim::kPrimEmbed), fv_adjoint.second->k()});
+    fv_adjoint.second->RegisterKUser(node, 1);
     auto sens = fv_adjoint.second->dout();
     new_grad_fv = tape_->NewCNode({
       NewValueNode(prim::kPrimEnvSetItem),
       new_grad_fv,
-      key,
+      node,
       sens,
     });
     fv_adjoint.second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), 3);
diff --git a/mindspore/ccsrc/utils/load_onnx/anf_converter.cc b/mindspore/ccsrc/utils/load_onnx/anf_converter.cc
index ad87d6ae8fbdf2bf14adf31f30fa3688ce8948c6..9e8e51a46b48371b4ca6f3a3a950675558f502cc 100644
--- a/mindspore/ccsrc/utils/load_onnx/anf_converter.cc
+++ b/mindspore/ccsrc/utils/load_onnx/anf_converter.cc
@@ -60,6 +60,9 @@ int AnfConverter::ValidateFileStr(const std::string &modelFile, std::string file
 bool AnfConverter::ReadOnnxFromBinary(const std::string &modelFile, google::protobuf::Message *onnx_model) {
   std::unique_ptr<char[]> onnx_file(new (std::nothrow) char[PATH_MAX]{0});
   int fd = open(onnx_file.get(), O_RDONLY);
+  if (fd < 0) {
+    MS_LOG(EXCEPTION) << "failed to open file";
+  }
   google::protobuf::io::FileInputStream input(fd);
   google::protobuf::io::CodedInputStream code_input(&input);
   code_input.SetTotalBytesLimit(INT_MAX, 536870912);
@@ -85,7 +88,7 @@ std::shared_ptr<FuncGraph> AnfConverter::RunAnfConverter(const std::string &file
     MS_LOG(ERROR) << "Trans data not support input format!";
   } else {
     modelFile = flagItem.substr(pos + 1);
-    std::cout << "input protobuf file path is: " << flagItem.substr(pos + 1) << std::endl;
+    std::cout << "input protobuf file path is: " << modelFile << std::endl;
   }
 
   if (ValidateFileStr(modelFile, ".pb") != 0) {
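In the ReadOnnxFromBinary hunk above, POSIX open() returns -1 on failure, and the old code handed that value straight to google::protobuf::io::FileInputStream, where it would only surface later as a confusing parse failure; the new guard fails fast at the source. (It also makes a pre-existing oddity easier to see: the path passed to open() is the freshly zeroed onnx_file buffer rather than modelFile, which this patch leaves as-is.) A sketch of the fail-fast pattern, with an illustrative path and no protobuf dependency:

// open_check.cc - illustrative only (POSIX).
#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

int main(int argc, char **argv) {
  const char *path = argc > 1 ? argv[1] : "model.pb";  // hypothetical input file
  int fd = open(path, O_RDONLY);
  if (fd < 0) {
    std::perror("open");  // report here instead of passing a bad fd downstream
    return 1;
  }
  // ... hand fd to google::protobuf::io::FileInputStream here ...
  close(fd);
  return 0;
}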
diff --git a/mindspore/ccsrc/utils/load_onnx/anf_model_parser.cc b/mindspore/ccsrc/utils/load_onnx/anf_model_parser.cc
index 77521205220689da6ffd72aece974853b8e06afd..ac7fe1564a9edc1e3bc7bb67b9a83baacf7a79ce 100644
--- a/mindspore/ccsrc/utils/load_onnx/anf_model_parser.cc
+++ b/mindspore/ccsrc/utils/load_onnx/anf_model_parser.cc
@@ -119,7 +119,10 @@ bool MSANFModelParser::BuildParameterForFuncGraph(const ParameterPtr &node, cons
   std::string initial_data = initialize_proto.raw_data();
   auto *tensor_data_buf = reinterpret_cast<uint8_t *>(tensor_info->data_c());
   MS_EXCEPTION_IF_NULL(tensor_data_buf);
-  memcpy_s(tensor_data_buf, tensor_info->data().nbytes(), initial_data.data(), initial_data.size());
+  auto ret = memcpy_s(tensor_data_buf, tensor_info->data().nbytes(), initial_data.data(), initial_data.size());
+  if (ret != 0) {
+    MS_LOG(EXCEPTION) << "memcpy_s error, errno " << ret;
+  }
 
   auto param_value = std::make_shared<ParamValuePy>();
   MS_EXCEPTION_IF_NULL(param_value);
@@ -249,7 +252,11 @@ bool MSANFModelParser::ObtainValueNodeInTensorForm(const std::string &value_node
   tensor::TensorPtr tensor_info = std::make_shared<tensor::Tensor>(kDefaultValueSwitchMap[attr_tensor_type], shape);
   const std::string &tensor_buf = attr_tensor.raw_data();
   auto *tensor_data_buf = reinterpret_cast<uint8_t *>(tensor_info->data_c());
-  memcpy_s(tensor_data_buf, tensor_info->data().nbytes(), tensor_buf.data(), tensor_buf.size());
+  auto ret = memcpy_s(tensor_data_buf, tensor_info->data().nbytes(), tensor_buf.data(), tensor_buf.size());
+  if (ret != 0) {
+    MS_LOG(EXCEPTION) << "memcpy_s error, errno " << ret;
+  }
+
   auto new_value_node = NewValueNode(MakeValue(tensor_info));
   MS_EXCEPTION_IF_NULL(new_value_node);
   auto tensor_abstract = tensor_info->ToAbstract();
@@ -336,7 +343,6 @@ bool MSANFModelParser::GetAttrValueForValueNode(const std::string &ref_attr_name
     MS_LOG(ERROR) << "parse ValueNode value don't support input of ref_attr_name";
     return false;
   }
-  return true;
 }
 
 bool MSANFModelParser::BuildValueNodeForFuncGraph(const onnx::NodeProto &node_proto) {
diff --git a/mindspore/ccsrc/utils/load_onnx/anf_model_parser.h b/mindspore/ccsrc/utils/load_onnx/anf_model_parser.h
index 11b9cd101f8d83abb4ed5ed569a9a5d28b1f82f9..58fbd1bc7073ceb683d1bd5fb3e55c60c22a66ae 100644
--- a/mindspore/ccsrc/utils/load_onnx/anf_model_parser.h
+++ b/mindspore/ccsrc/utils/load_onnx/anf_model_parser.h
@@ -32,7 +32,7 @@ using uint64 = uint64_t;
 using float16 = Eigen::half;
 class MSANFModelParser {
  public:
-  MSANFModelParser() = default;
+  MSANFModelParser() : producer_name_(""), model_version_(0), ir_version_(0) {}
   ~MSANFModelParser() = default;
 
   FuncGraphPtr Parse(const onnx::ModelProto &model_proto);
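The constructor change in anf_model_parser.h matters because "= default" leaves built-in members indeterminate: reading model_version_ or ir_version_ before Parse() runs would be undefined behavior, while the initializer list gives the parser a well-defined pre-parse state. A tiny illustration of the difference; the class is a stand-in and the member types are assumptions, since the header excerpt does not show the field declarations:

// default_init.cc - illustrative only.
#include <iostream>
#include <string>

class Parser {
 public:
  Parser() : producer_name_(""), model_version_(0), ir_version_(0) {}
  int model_version() const { return model_version_; }

 private:
  std::string producer_name_;  // already safe: strings default-construct empty
  int model_version_;          // scalars are what need explicit initialization
  int ir_version_;
};

int main() {
  Parser p;
  std::cout << p.model_version() << "\n";  // deterministically 0 now
  return 0;
}

With a defaulted constructor in its place, the same read would return garbage (and trip MemorySanitizer), so the scalar members are the real beneficiaries of this hunk.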