Commit bd955c75 authored by mindspore-ci-bot, committed by Gitee

!5067 fix post training quant log

Merge pull request !5067 from xutianchun/quant_0824
......
@@ -69,6 +69,10 @@ int AnfExporter::ConvertQuantParam(const std::unique_ptr<schema::MetaGraphT> &me
   // activation
   auto input_quant_params = primitive->GetInputQuantParams();
   auto node_type = (schema::PrimitiveType)primitive->Type();
+  if (input_quant_params.empty()) {
+    MS_LOG(ERROR) << "node: " << dst_node->name << " input quant params is empty";
+    return RET_ERROR;
+  }
   for (size_t i = 0; i < input_quant_params.size(); i++) {
     if (i >= dst_node->inputIndex.size()) {
       MS_LOG(ERROR) << "node: " << dst_node->name << " input has " << input_quant_params.size()
......
@@ -93,7 +97,10 @@ int AnfExporter::ConvertQuantParam(const std::unique_ptr<schema::MetaGraphT> &me
   auto tensor_output = meta_graph->allTensors[output_index].get();
   auto output_quant_params = primitive->GetOutputQuantParams();
   if (output_quant_params.empty()) {
-    MS_LOG(WARNING) << "node: " << dst_node->name << " output quant params is empty";
+    if (node_type != schema::PrimitiveType_QuantDTypeCast) {
+      MS_LOG(ERROR) << "node: " << dst_node->name << " output quant params is empty";
+      return RET_ERROR;
+    }
   } else {
     for (auto output_quant_param : output_quant_params[0]) {
       if (tensor_output->quantParams.empty()) {
......
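For context, the hunks above tighten the exporter's validation: a node with empty input quant params now aborts the conversion, and empty output quant params are only tolerated for QuantDTypeCast nodes (presumably because their output can be a plain float tensor that carries no quant params). The following is a minimal, hypothetical sketch of that decision logic only; the names (ValidateQuantParams, NodeType, the boolean flags) are illustrative and not the real AnfExporter interfaces.

#include <iostream>
#include <string>

// Hypothetical, simplified stand-ins for schema::PrimitiveType and the
// converter's return codes; not the real MindSpore Lite definitions.
enum class NodeType { kQuantDTypeCast, kConv2D, kOther };
constexpr int RET_OK = 0;
constexpr int RET_ERROR = -1;

// Sketch of the post-change policy: every node must carry input quant params,
// while missing output quant params are tolerated only for QuantDTypeCast.
int ValidateQuantParams(const std::string &node_name, NodeType type,
                        bool has_input_params, bool has_output_params) {
  if (!has_input_params) {
    std::cerr << "node: " << node_name << " input quant params is empty\n";
    return RET_ERROR;
  }
  if (!has_output_params && type != NodeType::kQuantDTypeCast) {
    std::cerr << "node: " << node_name << " output quant params is empty\n";
    return RET_ERROR;
  }
  return RET_OK;
}

int main() {
  // A QuantDTypeCast without output quant params passes; a Conv2D does not.
  std::cout << ValidateQuantParams("cast_0", NodeType::kQuantDTypeCast, true, false) << "\n";
  std::cout << ValidateQuantParams("conv_1", NodeType::kConv2D, true, false) << "\n";
  return 0;
}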
......
@@ -168,11 +168,11 @@ STATUS CalQuantizationParams(schema::QuantParamT *quantParam, double mMin, doubl
                              int quant_max, int quant_min, int num_bits) {
   MS_ASSERT(quantParam != nullptr);
   if (mMin > 0.0f) {
-    MS_LOG(ERROR) << "min " << mMin << " is bigger than 0, set to 0, this may cause low precision";
+    MS_LOG(DEBUG) << "min " << mMin << " is bigger than 0, set to 0, this may cause low precision";
     mMin = 0.0f;
   }
   if (mMax < 0.0f) {
-    MS_LOG(ERROR) << "mMax " << mMax << " is smaller than 0, set to 0, this may cause low precision";
+    MS_LOG(DEBUG) << "mMax " << mMax << " is smaller than 0, set to 0, this may cause low precision";
     mMax = 0.0f;
   }
   if (mMin > mMax) {
......
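The clamping in this hunk (mMin pulled down to 0, mMax pushed up to 0) follows the usual asymmetric affine quantization scheme, where the real range must contain 0 so that real 0.0 maps exactly onto an integer zero point. Below is a minimal sketch of that derivation under those assumptions; AffineQuantParam and CalcScaleZeroPoint are illustrative names, and MindSpore Lite's actual CalQuantizationParams may round or clamp the zero point differently.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Illustrative scale/zero-point pair; not the schema::QuantParamT layout.
struct AffineQuantParam {
  double scale = 1.0;
  int32_t zero_point = 0;
};

// Derive scale and zero point from a float range, after widening the range to
// include 0 exactly as the hunk above does.
bool CalcScaleZeroPoint(double min, double max, int quant_min, int quant_max,
                        AffineQuantParam *param) {
  min = std::min(min, 0.0);  // min > 0 is pulled down to 0
  max = std::max(max, 0.0);  // max < 0 is pushed up to 0
  if (param == nullptr || max - min <= 0.0 || quant_max <= quant_min) {
    return false;  // degenerate range, nothing to quantize
  }
  param->scale = (max - min) / static_cast<double>(quant_max - quant_min);
  // zero_point is the integer that real value 0.0 maps to.
  param->zero_point = static_cast<int32_t>(std::round(quant_min - min / param->scale));
  return true;
}

int main() {
  AffineQuantParam p;
  if (CalcScaleZeroPoint(-1.0, 3.0, -128, 127, &p)) {
    std::printf("scale=%f zero_point=%d\n", p.scale, p.zero_point);  // e.g. scale~0.0157, zp=-64
  }
  return 0;
}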