diff --git a/mindspore/ccsrc/session/session_basic.cc b/mindspore/ccsrc/session/session_basic.cc
index 7606952a4a51cdc1d58ebdfb44ce20c00f944d0a..db6257c815ad6192e7eaaacf53546757ba21e913 100644
--- a/mindspore/ccsrc/session/session_basic.cc
+++ b/mindspore/ccsrc/session/session_basic.cc
@@ -32,15 +32,16 @@
 #include "pre_activate/common/helper.h"
 #include "common/utils.h"
 #include "ir/dtype.h"
+#include "ir/anf.h"
 
 namespace mindspore {
 namespace session {
-static std::shared_ptr<std::map<tensor::TensorPtr, ParameterPtr>> python_paras_;
+static std::shared_ptr<std::map<PyObject *, ParameterPtr>> python_paras_;
 void ClearPythonParasMap() { python_paras_ = nullptr; }
 
 namespace {
 const int kSummaryGetItem = 2;
-tensor::TensorPtr GetParamDefaultInputTensor(const AnfNodePtr &node) {
+PyObject *GetParamDefaultInputTensor(const AnfNodePtr &node) {
   if (node == nullptr) {
     return nullptr;
   }
@@ -50,14 +51,7 @@ tensor::TensorPtr GetParamDefaultInputTensor(const AnfNodePtr &node) {
   }
   auto param_value = std::dynamic_pointer_cast<ParamValuePy>(parameter->default_param());
   auto py_param = param_value->value();
-  if (!py::hasattr(py_param, "default_input")) {
-    return nullptr;
-  }
-  auto py_p_input = py_param.attr("default_input");
-  if (!py::hasattr(py_p_input, PYTHON_TENSOR_FLAG)) {
-    return nullptr;
-  }
-  return py_p_input.cast<tensor::TensorPtr>();
+  return py_param.ptr();
 }
 
 void GetSummaryNodes(const KernelGraph *graph, std::unordered_map<std::string, std::pair<AnfNodePtr, int>> *summary) {
@@ -354,15 +348,17 @@ ParameterPtr SessionBasic::CreateNewParameterFromParameter(const AnfNodePtr &anf
   ParameterPtr new_parameter = nullptr;
   // if parameter's python parameter has been exist a backend parameter, reuse the exist parameter
   if (python_paras_ == nullptr) {
-    python_paras_ = std::make_shared<std::map<tensor::TensorPtr, ParameterPtr>>();
+    python_paras_ = std::make_shared<std::map<PyObject *, ParameterPtr>>();
   }
-  if (python_paras_->find(m_tensor) != python_paras_->end() && GetGraphIdByNode(anf) != kInvalidGraphId) {
+  if (python_paras_->find(m_tensor) != python_paras_->end() && GetGraphIdByNode(anf) == kInvalidGraphId) {
     new_parameter = (*python_paras_)[m_tensor];
   } else {
+    TraceManager::DebugTrace(std::make_shared<TraceCopy>(anf->debug_info()));
     new_parameter = graph->NewParameter(anf->cast<ParameterPtr>());
     if (m_tensor != nullptr) {
       (*python_paras_)[m_tensor] = new_parameter;
     }
+    TraceManager::EndTrace();
   }
   graph_inputs->push_back(new_parameter);
   valid_inputs->push_back(valid_input);
diff --git a/mindspore/ops/_op_impl/tbe/bias_add_grad.py b/mindspore/ops/_op_impl/tbe/bias_add_grad.py
index e59c197bce967b94db7f531ad3e011646b4b99e5..557dececb7890180cc74e8fd4a8c1e98080c4a3b 100644
--- a/mindspore/ops/_op_impl/tbe/bias_add_grad.py
+++ b/mindspore/ops/_op_impl/tbe/bias_add_grad.py
@@ -26,8 +26,6 @@ bias_add_grad_op_info = TBERegOp("BiasAddGrad") \
     .attr("data_format", "required", "str", "all") \
     .input(0, "output_backprop", False, "required", "all") \
     .output(0, "output", False, "required", "all") \
-    .dtype_format(DataType.F16_Default, DataType.F16_Default) \
-    .dtype_format(DataType.F16_FracNZ, DataType.F16_Default) \
     .dtype_format(DataType.F32_Default, DataType.F32_Default) \
     .dtype_format(DataType.F32_FracNZ, DataType.F32_Default) \
     .get_op_info()