Commit 85719a0a authored by Alexander Alekhin

dnn: support outputs registration under new names

- fixed ONNX importer
Parent b5b52afd
@@ -489,6 +489,18 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
     */
    void connect(int outLayerId, int outNum, int inpLayerId, int inpNum);
    /** @brief Registers network output with name
     *
     *  Function may create additional 'Identity' layer.
     *
     *  @param outputName identifier of the output
     *  @param layerId identifier of the layer that produces the output
     *  @param outputPort index of that layer's output port
     *
     *  @returns index of bound layer (the same as layerId or newly created)
     */
    int registerOutput(const std::string& outputName, int layerId, int outputPort);
    /** @brief Sets outputs names of the network input pseudo layer.
     *
     *  Each net always has special own the network input pseudo layer with id=0.
@@ -610,10 +622,14 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
    CV_WRAP inline Mat getParam(const String& layerName, int numParam = 0) const { return getParam(getLayerId(layerName), numParam); }
    /** @brief Returns indexes of layers with unconnected outputs.
     *
     *  FIXIT: Rework API to registerOutput() approach, deprecate this call
     */
    CV_WRAP std::vector<int> getUnconnectedOutLayers() const;
    /** @brief Returns names of layers with unconnected outputs.
     *
     *  FIXIT: Rework API to registerOutput() approach, deprecate this call
     */
    CV_WRAP std::vector<String> getUnconnectedOutLayersNames() const;
...
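
A minimal usage sketch of the new API from user code (illustrative only, not part of this commit; the layer, input and output names below are assumptions):

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::Net net;
    net.setInputsNames({"data"});                   // network input pseudo-layer has id=0

    cv::dnn::LayerParams lp;
    int reluId = net.addLayer("relu1", "ReLU", lp);
    net.connect(0, 0, reluId, 0);                   // data:0 -> relu1:0

    // Publish output port 0 of "relu1" under the public name "features";
    // an extra 'Identity' layer may be created because the name differs.
    int boundId = net.registerOutput("features", reluId, 0);
    (void)boundId;

    // Later, with real input data:
    //   net.setInput(blob);
    //   cv::Mat out = net.forward("features");
    return 0;
}
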
@@ -1135,6 +1135,7 @@ struct Net::Impl : public detail::NetImplBase
    std::vector<LayerPin> blobsToKeep;
    MapIdToLayerData layers;
    std::map<String, int> layerNameToId;
    std::map<std::string, int> outputNameToId;  // use registerOutput() to populate outputs
    BlobManager blobManager;
    int preferableBackend;
    int preferableTarget;
@@ -1483,6 +1484,23 @@ struct Net::Impl : public detail::NetImplBase
        return pins;
    }
    int addLayer(const String &name, const String &type, LayerParams &params)
    {
        if (getLayerId(name) >= 0)
        {
            CV_Error(Error::StsBadArg, "Layer \"" + name + "\" already into net");
            return -1;
        }

        int id = ++lastLayerId;
        layerNameToId.insert(std::make_pair(name, id));
        layers.insert(std::make_pair(id, LayerData(id, name, type, params)));
        if (params.get<bool>("has_dynamic_shapes", false))
            hasDynamicShapes = true;

        return id;
    }

    void connect(int outLayerId, int outNum, int inLayerId, int inNum)
    {
        CV_Assert(outLayerId < inLayerId);
@@ -1492,6 +1510,39 @@ struct Net::Impl : public detail::NetImplBase
        addLayerInput(ldInp, inNum, LayerPin(outLayerId, outNum));
        ldOut.requiredOutputs.insert(outNum);
        ldOut.consumers.push_back(LayerPin(inLayerId, outNum));

        CV_LOG_VERBOSE(NULL, 0, "DNN: connect(" << outLayerId << ":" << outNum << " ==> " << inLayerId << ":" << inNum << ")");
    }
    int registerOutput(const std::string& outputName, int layerId, int outputPort)
    {
        int checkLayerId = getLayerId(outputName);
        if (checkLayerId >= 0)
        {
            if (checkLayerId == layerId)
            {
                if (outputPort == 0)
                {
                    // layer name correlates with its output name
                    CV_LOG_DEBUG(NULL, "DNN: register output='" << outputName << "': reuse layer with the same name and id=" << layerId << " to be linked");
                    outputNameToId.insert(std::make_pair(outputName, layerId));
                    return checkLayerId;
                }
            }
            CV_Error_(Error::StsBadArg, ("Layer with name='%s' already exists id=%d (to be linked with %d:%d)", outputName.c_str(), checkLayerId, layerId, outputPort));
        }
#if 0  // TODO
        if (outputPort == 0)
            // make alias only, need to adopt getUnconnectedOutLayers() call
#endif
        LayerParams outputLayerParams;
        outputLayerParams.name = outputName;
        outputLayerParams.type = "Identity";
        int outputLayerId = addLayer(outputLayerParams.name, outputLayerParams.type, outputLayerParams);
        connect(layerId, outputPort, outputLayerId, 0);
        CV_LOG_DEBUG(NULL, "DNN: register output='" << outputName << "' id=" << outputLayerId << " defined as " << layerId << ":" << outputPort);
        outputNameToId.insert(std::make_pair(outputName, outputLayerId));
        return outputLayerId;
    }
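
A sketch of the two branches above from the caller's side (hedged; the layer name "prob" and the ids are assumptions, not part of this diff):

// given a net that already contains a layer named "prob" with id probId
int a = net.registerOutput("prob", probId, 0);    // alias path: the name matches the
                                                  // producing layer and the port is 0,
                                                  // so no new layer; a == probId
int b = net.registerOutput("output", probId, 0);  // Identity path: a new 'Identity'
                                                  // layer named "output" is added and
                                                  // connected to prob:0; b != probId
// Registering a name that already belongs to an unrelated layer, or a non-zero
// port under the layer's own name, raises Error::StsBadArg.
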
    void initBackend(const std::vector<LayerPin>& blobsToKeep_)
@@ -3599,20 +3650,8 @@ Net::~Net()
 int Net::addLayer(const String &name, const String &type, LayerParams &params)
 {
     CV_TRACE_FUNCTION();
-    if (impl->getLayerId(name) >= 0)
-    {
-        CV_Error(Error::StsBadArg, "Layer \"" + name + "\" already into net");
-        return -1;
-    }
-    int id = ++impl->lastLayerId;
-    impl->layerNameToId.insert(std::make_pair(name, id));
-    impl->layers.insert(std::make_pair(id, LayerData(id, name, type, params)));
-    if (params.get<bool>("has_dynamic_shapes", false))
-        impl->hasDynamicShapes = true;
-    return id;
+    CV_Assert(impl);
+    return impl->addLayer(name, type, params);
 }
int Net::addLayerToPrev(const String &name, const String &type, LayerParams &params)
@@ -3644,6 +3683,13 @@ void Net::connect(String _outPin, String _inPin)
    impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
}

int Net::registerOutput(const std::string& outputName, int layerId, int outputPort)
{
    CV_TRACE_FUNCTION();
    CV_Assert(impl);
    return impl->registerOutput(outputName, layerId, outputPort);
}

Mat Net::forward(const String& outputName)
{
    CV_TRACE_FUNCTION();
@@ -4328,8 +4374,22 @@ bool Net::empty() const
std::vector<int> Net::getUnconnectedOutLayers() const
{
    CV_TRACE_FUNCTION();
    CV_Assert(impl);

    std::vector<int> layersIds;
    // registerOutput() flow
    const std::map<std::string, int>& outputNameToId = impl->outputNameToId;
    if (!outputNameToId.empty())
    {
        for (std::map<std::string, int>::const_iterator it = outputNameToId.begin(); it != outputNameToId.end(); ++it)
        {
            layersIds.push_back(it->second);
        }
        return layersIds;
    }

    Impl::MapIdToLayerData::const_iterator it;
    for (it = impl->layers.begin(); it != impl->layers.end(); it++)
    {
...
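
The effect of the block above, sketched from the caller's side (hedged; not part of the diff):

std::vector<int> outIds = net.getUnconnectedOutLayers();
// - if registerOutput() was used (e.g. by the ONNX importer below), outIds holds
//   exactly the registered layer ids, ordered by output name (std::map order);
// - otherwise the old behaviour applies: ids of layers whose outputs have no consumers.
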
@@ -130,6 +130,7 @@ protected:
    std::map<std::string, LayerInfo> layer_id;
    typedef std::map<std::string, LayerInfo>::iterator IterLayerId_t;
    typedef std::map<std::string, LayerInfo>::const_iterator ConstIterLayerId_t;

    void handleNode(const opencv_onnx::NodeProto& node_proto);
@@ -687,9 +688,31 @@ void ONNXImporter::populateNet()
        handleNode(node_proto);
    }
    // register outputs
    for (int i = 0; i < graph_proto.output_size(); ++i)
    {
        const std::string& output_name = graph_proto.output(i).name();
        if (output_name.empty())
        {
            CV_LOG_ERROR(NULL, "DNN/ONNX: can't register output without name: " << i);
            continue;
        }
        ConstIterLayerId_t layerIt = layer_id.find(output_name);
        if (layerIt == layer_id.end())
        {
            CV_LOG_ERROR(NULL, "DNN/ONNX: can't find layer for output name: '" << output_name << "'. Does model imported properly?");
            continue;
        }

        const LayerInfo& li = layerIt->second;
        int outputId = dstNet.registerOutput(output_name, li.layerId, li.outputId); CV_UNUSED(outputId);
        // no need to duplicate message from engine: CV_LOG_DEBUG(NULL, "DNN/ONNX: registered output='" << output_name << "' with id=" << outputId);
    }

    CV_LOG_DEBUG(NULL, "DNN/ONNX: import completed!");
}
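
With graph outputs registered under their ONNX names, the outputs of a loaded model can then be requested by those names directly, for example (file, blob and output names are illustrative, not from this diff):

cv::dnn::Net net = cv::dnn::readNetFromONNX("model.onnx");
net.setInput(blob);                          // blob prepared e.g. via blobFromImage()
cv::Mat scores = net.forward("scores");      // "scores" stands for graph_proto.output(i).name()

// several named outputs can be fetched in one call
std::vector<cv::Mat> outs;
net.forward(outs, std::vector<cv::String>{"scores", "boxes"});
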
static
const std::string& extractNodeName(const opencv_onnx::NodeProto& node_proto)
{
    if (node_proto.has_name() && !node_proto.name().empty())
...