Commit af233753 authored by Alexander Alekhin

Merge pull request #15861 from dkurt:dnn_fix_get_input_layers

@@ -3189,14 +3189,11 @@ Ptr<Layer> Net::getLayer(LayerId layerId)
 std::vector<Ptr<Layer> > Net::getLayerInputs(LayerId layerId)
 {
     LayerData &ld = impl->getLayerData(layerId);
-    if (!ld.layerInstance)
-        CV_Error(Error::StsNullPtr, format("Requested layer \"%s\" was not initialized", ld.name.c_str()));
     std::vector<Ptr<Layer> > inputLayers;
-    inputLayers.reserve(ld.inputLayersId.size());
-    std::set<int>::iterator it;
-    for (it = ld.inputLayersId.begin(); it != ld.inputLayersId.end(); ++it) {
-        inputLayers.push_back(getLayer(*it));
+    inputLayers.reserve(ld.inputBlobsId.size());
+    for (int i = 0; i < ld.inputBlobsId.size(); ++i) {
+        inputLayers.push_back(getLayer(ld.inputBlobsId[i].lid));
     }
     return inputLayers;
 }
......
@@ -86,6 +86,8 @@ TEST_P(dump, Regression)
     Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt"),
                       findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
+
+    ASSERT_EQ(net.getLayerInputs(net.getLayerId("fire2/concat")).size(), 2);
     int size[] = {1, 3, 227, 227};
     Mat input = cv::Mat::ones(4, size, CV_32F);
     net.setInput(input);
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please sign up