Commit 3a445181 authored by syyxsxx

add namespace InferenceEngine::

Parent 26466dcd
@@ -207,8 +207,8 @@ bool Transforms::Run(cv::Mat* im, ImageBlob* data) {
   }
   (*im).convertTo(*im, CV_32FC3);
   if (type_ == "detector") {
-    LockedMemory<void> input2Mapped = as<MemoryBlob>(
-        data->ori_im_size_)->wmap();
+    InferenceEngine::LockedMemory<void> input2Mapped =
+        as<InferenceEngine::MemoryBlob>(data->ori_im_size_)->wmap();
     float *p = input2Mapped.as<float*>();
     p[0] = im->rows;
     p[1] = im->cols;
@@ -229,8 +229,8 @@ bool Transforms::Run(cv::Mat* im, ImageBlob* data) {
   const size_t width = blobSize[3];
   const size_t height = blobSize[2];
   const size_t channels = blobSize[1];
-  MemoryBlob::Ptr mblob = InferenceEngine::as<InferenceEngine::MemoryBlob>(
-      data->blob);
+  InferenceEngine::MemoryBlob::Ptr mblob =
+      InferenceEngine::as<InferenceEngine::MemoryBlob>(data->blob);
   auto mblobHolder = mblob->wmap();
   float *blob_data = mblobHolder.as<float *>();
   for (size_t c = 0; c < channels; c++) {
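For reference, below is a minimal, self-contained sketch of the two blob-filling patterns that this change fully qualifies with InferenceEngine::. The helper names FillOriImSize and FillImageBlob, and the assumed blob shapes (a 1x2 FP32 size blob and an NCHW FP32 image blob), are illustrative assumptions for this sketch, not code from the repository.

// Sketch only: fill Inference Engine blobs the way Transforms::Run does
// after this commit. Helper names and blob shapes are assumptions.
#include <inference_engine.hpp>
#include <opencv2/opencv.hpp>

// Write the original image size into an FP32 blob holding {rows, cols}
// (the detector branch in the first hunk).
void FillOriImSize(const cv::Mat& im, const InferenceEngine::Blob::Ptr& blob) {
  InferenceEngine::LockedMemory<void> mapped =
      InferenceEngine::as<InferenceEngine::MemoryBlob>(blob)->wmap();
  float* p = mapped.as<float*>();
  p[0] = static_cast<float>(im.rows);
  p[1] = static_cast<float>(im.cols);
}

// Copy an HWC CV_32FC3 image into an NCHW FP32 input blob
// (the pattern in the second hunk).
void FillImageBlob(const cv::Mat& im, const InferenceEngine::Blob::Ptr& blob) {
  InferenceEngine::MemoryBlob::Ptr mblob =
      InferenceEngine::as<InferenceEngine::MemoryBlob>(blob);
  const InferenceEngine::SizeVector dims = mblob->getTensorDesc().getDims();
  const size_t channels = dims[1];
  const size_t height = dims[2];
  const size_t width = dims[3];
  auto holder = mblob->wmap();
  float* blob_data = holder.as<float*>();
  for (size_t c = 0; c < channels; ++c) {
    for (size_t h = 0; h < height; ++h) {
      for (size_t w = 0; w < width; ++w) {
        // NCHW offset = c*H*W + h*W + w; source image is HWC.
        blob_data[c * height * width + h * width + w] =
            im.at<cv::Vec3f>(static_cast<int>(h), static_cast<int>(w))[c];
      }
    }
  }
}

Fully qualifying LockedMemory, MemoryBlob, and as<> as the diff does lets the file compile without relying on a using namespace InferenceEngine; directive, which is presumably the intent of this commit.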