Commit 3c3c5ef2 authored by Dmitry Kurtaev

Fix a dnn bug with retrieving all the output blobs

Parent 4f668e10
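For context, here is a minimal usage sketch (not part of the commit) of the overload this change fixes: Net::forward(std::vector<std::vector<Mat> >& outputBlobs, const std::vector<String>& outBlobNames) is expected to fill one inner vector per requested name, holding every output blob of that layer. The two-output Slice prototxt and the layer name mirror the regression test added below; the main wrapper and the printed summary are illustrative assumptions, not code from the repository.

// Sketch: retrieve all output blobs of a multi-output layer by name.
#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>
#include <iostream>

int main()
{
    using namespace cv;
    using namespace cv::dnn;

    // A Slice layer with two outputs, as in the regression test below.
    std::string prototxt =
        "input: \"data\"\n"
        "layer {\n"
        "  name: \"testLayer\"\n"
        "  type: \"Slice\"\n"
        "  bottom: \"data\"\n"
        "  top: \"firstCopy\"\n"
        "  top: \"secondCopy\"\n"
        "  slice_param { axis: 0 slice_point: 2 }\n"
        "}";
    Net net = readNetFromCaffe(&prototxt[0], prototxt.size());
    net.setPreferableBackend(DNN_BACKEND_OPENCV);

    Mat inp(4, 5, CV_32F);
    randu(inp, -1, 1);
    net.setInput(inp);

    // Request the layer by name; outBlobs gets one entry per requested name,
    // and that entry holds both of the layer's outputs ("firstCopy", "secondCopy").
    std::vector<String> outNames(1, "testLayer");
    std::vector<std::vector<Mat> > outBlobs;
    net.forward(outBlobs, outNames);

    std::cout << "layers requested: " << outBlobs.size()
              << ", blobs for the first layer: " << outBlobs[0].size() << std::endl;
    return 0;
}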
@@ -2795,8 +2795,7 @@ void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,
     std::vector<LayerPin> pins;
     for (int i = 0; i < outBlobNames.size(); i++)
     {
-        std::vector<LayerPin> lp = impl->getLayerOutPins(outBlobNames[i]);
-        pins.insert(pins.end(), lp.begin(), lp.end());
+        pins.push_back(impl->getPinByAlias(outBlobNames[i]));
     }
     impl->setUpNet(pins);
@@ -2809,9 +2808,10 @@ void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,
     for (int i = 0; i < outBlobNames.size(); i++)
     {
         std::vector<LayerPin> lp = impl->getLayerOutPins(outBlobNames[i]);
-        for (int i = 0; i < lp.size(); i++)
+        outputBlobs[i].resize(lp.size());
+        for (int j = 0; j < lp.size(); j++)
         {
-            outputBlobs[i].push_back(impl->getBlob(lp[i]));
+            outputBlobs[i][j] = impl->getBlob(lp[j]);
         }
     }
 }
@@ -306,4 +306,38 @@ TEST_P(DeprecatedForward, CustomLayerWithFallback)
 INSTANTIATE_TEST_CASE_P(/**/, DeprecatedForward, dnnBackendsAndTargets());
+TEST(Net, forwardAndRetrieve)
+{
+    std::string prototxt =
+        "input: \"data\"\n"
+        "layer {\n"
+        "  name: \"testLayer\"\n"
+        "  type: \"Slice\"\n"
+        "  bottom: \"data\"\n"
+        "  top: \"firstCopy\"\n"
+        "  top: \"secondCopy\"\n"
+        "  slice_param {\n"
+        "    axis: 0\n"
+        "    slice_point: 2\n"
+        "  }\n"
+        "}";
+    Net net = readNetFromCaffe(&prototxt[0], prototxt.size());
+    net.setPreferableBackend(DNN_BACKEND_OPENCV);
+    Mat inp(4, 5, CV_32F);
+    randu(inp, -1, 1);
+    net.setInput(inp);
+    std::vector<String> outNames;
+    outNames.push_back("testLayer");
+    std::vector<std::vector<Mat> > outBlobs;
+    net.forward(outBlobs, outNames);
+    EXPECT_EQ(outBlobs.size(), 1);
+    EXPECT_EQ(outBlobs[0].size(), 2);
+    normAssert(outBlobs[0][0], inp.rowRange(0, 2), "first part");
+    normAssert(outBlobs[0][1], inp.rowRange(2, 4), "second part");
+}
 }} // namespace