diff --git a/mindspore/ccsrc/operator/composite/unpack_call.h b/mindspore/ccsrc/operator/composite/unpack_call.h
index 2f39615c1a66b70c7721799a0fcf7cf39313bbff..8c055a938649c506c6f4c0decaa8ff9c88691ad6 100644
--- a/mindspore/ccsrc/operator/composite/unpack_call.h
+++ b/mindspore/ccsrc/operator/composite/unpack_call.h
@@ -35,7 +35,6 @@ namespace mindspore {
 // namespace to support composite operators definition
 namespace prim {
-
 // Expand the tuple and dict parameters generated when parsing the function call,
 // and generate positional parameters and key-value pairs for function.
 class UnpackCall : public MetaFuncGraph {
@@ -47,7 +46,6 @@ class UnpackCall : public MetaFuncGraph {
   friend bool operator==(const UnpackCall &lhs, const UnpackCall &rhs) { return lhs.name_ == rhs.name_; }
 };
 using UnpackCallPtr = std::shared_ptr<UnpackCall>;
-
 }  // namespace prim
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/pipeline/pipeline.cc b/mindspore/ccsrc/pipeline/pipeline.cc
index 6e2c7be68559f46d5680ea201c5564517572af3f..d04f9617f12b9f93abd1fe493338cd51d1e788f9 100644
--- a/mindspore/ccsrc/pipeline/pipeline.cc
+++ b/mindspore/ccsrc/pipeline/pipeline.cc
@@ -300,6 +300,10 @@ void ExecutorPy::SaveCompiledGraphToPb(const std::string &phase_s) {
   // save the graph to file in protobuf format
   FuncGraphPtr func_graph = info_[phase_s]->resource->func_graph();
   MS_EXCEPTION_IF_NULL(func_graph);
+  if (phase_s.empty()) {
+    MS_LOG(ERROR) << "`phase` is empty '" << phase_s << "'!";
+    return;
+  }
   std::string name_prefix = phase_s.substr(0, phase_s.find("."));
   std::string pb_filename = std::string("ms_output_") + name_prefix + ".pb";
   std::string filename = GetFilePathName(pb_filename);
diff --git a/mindspore/nn/wrap/cell_wrapper.py b/mindspore/nn/wrap/cell_wrapper.py
index de0007c2ebe16fcb8be74869356493b0b45c07e2..60718ec2b112ecde40f8fa6306643d9bbd1b5818 100644
--- a/mindspore/nn/wrap/cell_wrapper.py
+++ b/mindspore/nn/wrap/cell_wrapper.py
@@ -304,15 +304,19 @@ class WithEvalCell(Cell):
         >>> eval_net = nn.WithEvalCell(net, loss_fn)
     """
 
-    def __init__(self, network, loss_fn):
+    def __init__(self, network, loss_fn, add_cast_fp32=False):
         super(WithEvalCell, self).__init__(auto_prefix=False)
         self._network = network
         self._loss_fn = loss_fn
+        self.add_cast_fp32 = add_cast_fp32
+
     def construct(self, data, label):
         outputs = self._network(data)
-        label = _mp_cast_helper(mstype.float32, label)
-        loss = self._loss_fn(F.cast(outputs, mstype.float32), label)
+        if self.add_cast_fp32:
+            label = _mp_cast_helper(mstype.float32, label)
+            outputs = F.cast(outputs, mstype.float32)
+        loss = self._loss_fn(outputs, label)
         return loss, outputs, label
diff --git a/mindspore/train/model.py b/mindspore/train/model.py
index 5b1a34e41820bb62c6d7ee905105119b2d29643a..c943252e43dba6e17083c2a30fe822188f04ed16 100755
--- a/mindspore/train/model.py
+++ b/mindspore/train/model.py
@@ -162,7 +162,7 @@ class Model:
         else:
             if self._loss_fn is None:
                 raise ValueError("loss_fn can not be None.")
-            self._eval_network = nn.WithEvalCell(self._network, self._loss_fn)
+            self._eval_network = nn.WithEvalCell(self._network, self._loss_fn, self._amp_level == "O2")
             self._eval_indexes = [0, 1, 2]
 
     def _build_predict_network(self):
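A minimal usage sketch (not part of the patch) of the updated `WithEvalCell` signature: the network and loss below are placeholders chosen for illustration, and the third argument mirrors how `Model` now passes `self._amp_level == "O2"`, so the float32 cast is only applied under AMP level O2.

```python
# Hedged usage sketch: nn.Dense and SoftmaxCrossEntropyWithLogits are
# placeholder choices; only the WithEvalCell signature comes from this change.
import mindspore.nn as nn

net = nn.Dense(16, 10)                         # placeholder eval network
loss_fn = nn.SoftmaxCrossEntropyWithLogits()   # placeholder loss function

# Default behaviour: outputs and labels reach loss_fn without an extra cast.
eval_net = nn.WithEvalCell(net, loss_fn)

# Under AMP level "O2" the Model wrapper now requests the float32 cast, so the
# loss is computed in float32 even if the network itself runs in float16.
amp_level = "O2"
eval_net_o2 = nn.WithEvalCell(net, loss_fn, amp_level == "O2")
```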