diff --git a/modules/objdetect/include/opencv2/objdetect/objdetect.hpp b/modules/objdetect/include/opencv2/objdetect/objdetect.hpp index ad5b81f98445cad83fe1b2cb620cdd3349bf659d..0bfb37e05635fc43b870e7fb0336a943ffa8e43e 100644 --- a/modules/objdetect/include/opencv2/objdetect/objdetect.hpp +++ b/modules/objdetect/include/opencv2/objdetect/objdetect.hpp @@ -278,6 +278,7 @@ class CV_EXPORTS FeatureEvaluator public: enum { HAAR = 0, LBP = 1 }; virtual ~FeatureEvaluator(); + virtual bool read(const FileNode& node); virtual Ptr clone() const; virtual int getFeatureType() const; @@ -296,65 +297,96 @@ template<> CV_EXPORTS void Ptr::delete_obj(); class CV_EXPORTS_W CascadeClassifier { public: - struct CV_EXPORTS DTreeNode - { - int featureIdx; - float threshold; // for ordered features only - int left; - int right; - }; - - struct CV_EXPORTS DTree - { - int nodeCount; - }; - - struct CV_EXPORTS Stage - { - int first; - int ntrees; - float threshold; - }; - - enum { BOOST = 0 }; - enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2, - FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 }; - CV_WRAP CascadeClassifier(); - CV_WRAP CascadeClassifier(const string& filename); - ~CascadeClassifier(); + CV_WRAP CascadeClassifier( const string& filename ); + virtual ~CascadeClassifier(); - CV_WRAP bool empty() const; - CV_WRAP bool load(const string& filename); - bool read(const FileNode& node); + CV_WRAP virtual bool empty() const; + CV_WRAP bool load( const string& filename ); + bool read( const FileNode& node ); CV_WRAP void detectMultiScale( const Mat& image, CV_OUT vector& objects, double scaleFactor=1.1, int minNeighbors=3, int flags=0, Size minSize=Size(), - Size maxSize=Size()); - + Size maxSize=Size() ); + + + bool isOldFormatCascade() const; + virtual Size getOriginalWindowSize() const; + int getFeatureType() const; + bool setImage(const Mat&); + +protected: + virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize, + int stripSize, int yStep, double factor, vector& candidates ); + +private: + enum { BOOST = 0 }; + enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2, + FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 }; + + friend class CascadeClassifierInvoker; + + template + friend int predictOrdered( CascadeClassifier& cascade, Ptr &featureEvaluator); + + template + friend int predictCategorical( CascadeClassifier& cascade, Ptr &featureEvaluator); + + template + friend int predictOrderedStump( CascadeClassifier& cascade, Ptr &featureEvaluator); + + template + friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr &featureEvaluator); + bool setImage( Ptr&, const Mat& ); int runAt( Ptr&, Point ); - bool isStumpBased; - - int stageType; - int featureType; - int ncategories; - Size origWinSize; - - vector stages; - vector classifiers; - vector nodes; - vector leaves; - vector subsets; + class Data + { + public: + struct CV_EXPORTS DTreeNode + { + int featureIdx; + float threshold; // for ordered features only + int left; + int right; + }; + + struct CV_EXPORTS DTree + { + int nodeCount; + }; + + struct CV_EXPORTS Stage + { + int first; + int ntrees; + float threshold; + }; + + bool read(const FileNode &node); + + bool isStumpBased; + + int stageType; + int featureType; + int ncategories; + Size origWinSize; + + vector stages; + vector classifiers; + vector nodes; + vector leaves; + vector subsets; + }; - Ptr feval; + Data data; + Ptr featureEvaluator; Ptr oldCascade; }; - //////////////// HOG (Histogram-of-Oriented-Gradients) Descriptor and Object Detector ////////////// 
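The header hunk above moves the stage/tree tables into a private nested Data class and leaves only a small query surface public (empty, load, read, detectMultiScale, isOldFormatCascade, getOriginalWindowSize, getFeatureType, setImage). A minimal usage sketch of that public surface follows; the cascade file and image names are placeholders, not part of this patch.

// Minimal usage sketch of the refactored public interface declared above.
// The cascade file name and image path are placeholders.
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <cstdio>

int main()
{
    cv::CascadeClassifier cascade;
    if( !cascade.load("haarcascade_frontalface_alt.xml") )   // new-format XML assumed
        return -1;

    // The new query methods replace direct access to the (now private) fields.
    printf("old format: %d, feature type: %d, window: %dx%d\n",
           (int)cascade.isOldFormatCascade(),
           cascade.getFeatureType(),
           cascade.getOriginalWindowSize().width,
           cascade.getOriginalWindowSize().height);

    cv::Mat img = cv::imread("lena.jpg", 0);
    if( img.empty() )
        return -1;

    std::vector<cv::Rect> objects;
    cascade.detectMultiScale(img, objects, 1.1, 3, 0, cv::Size(30, 30));
    printf("detected %d objects\n", (int)objects.size());
    return 0;
}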
struct CV_EXPORTS_W HOGDescriptor diff --git a/modules/objdetect/src/cascadedetect.cpp b/modules/objdetect/src/cascadedetect.cpp index 224f89e1ea194634b9b5a94476c383a5c1af3b4f..200a3f885e346188d66b2e9eedc2d70c049b3c52 100644 --- a/modules/objdetect/src/cascadedetect.cpp +++ b/modules/objdetect/src/cascadedetect.cpp @@ -258,6 +258,7 @@ public: { return featuresPtr[featureIdx].calc(offset) * varianceNormFactor; } virtual double calcOrd(int featureIdx) const { return (*this)(featureIdx); } + private: Size origWinSize; Ptr > features; @@ -440,6 +441,7 @@ bool HaarEvaluator::setWindow( Point pt ) nf = 1.; varianceNormFactor = 1./nf; offset = (int)pOffset; + return true; } @@ -614,7 +616,7 @@ CascadeClassifier::~CascadeClassifier() bool CascadeClassifier::empty() const { - return oldCascade.empty() && stages.empty(); + return oldCascade.empty() && data.stages.empty(); } bool CascadeClassifier::load(const string& filename) @@ -635,31 +637,31 @@ bool CascadeClassifier::load(const string& filename) } template -inline int predictOrdered( CascadeClassifier& cascade, Ptr &_feval ) +inline int predictOrdered( CascadeClassifier& cascade, Ptr &_featureEvaluator ) { - int si, nstages = (int)cascade.stages.size(); + int nstages = (int)cascade.data.stages.size(); int nodeOfs = 0, leafOfs = 0; - FEval& feval = (FEval&)*_feval; - float* cascadeLeaves = &cascade.leaves[0]; - CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0]; - CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0]; - CascadeClassifier::Stage* cascadeStages = &cascade.stages[0]; + FEval& featureEvaluator = (FEval&)*_featureEvaluator; + float* cascadeLeaves = &cascade.data.leaves[0]; + CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; + CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0]; + CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; - for( si = 0; si < nstages; si++ ) + for( int si = 0; si < nstages; si++ ) { - CascadeClassifier::Stage& stage = cascadeStages[si]; + CascadeClassifier::Data::Stage& stage = cascadeStages[si]; int wi, ntrees = stage.ntrees; double sum = 0; for( wi = 0; wi < ntrees; wi++ ) { - CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi]; + CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi]; int idx = 0, root = nodeOfs; do { - CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx]; - double val = feval(node.featureIdx); + CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx]; + double val = featureEvaluator(node.featureIdx); idx = val < node.threshold ? 
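predictOrdered in this hunk walks each boosted stage's weak trees by comparing an ordered (Haar) feature value against the node threshold, accumulates leaf values, and rejects the window as soon as a stage sum drops below the stage threshold. The following self-contained sketch restates that traversal with simplified stand-in types and a toy feature function; it is illustrative, not the OpenCV code itself.

// Self-contained sketch of the ordered-feature traversal performed over
// cascade.data: descend each weak tree on threshold comparisons, sum the leaf
// values per stage, and reject once a stage sum falls below its threshold.
#include <vector>
#include <cstdio>

struct DTreeNode { int featureIdx; float threshold; int left, right; };
struct DTree     { int nodeCount; };
struct Stage     { int first; int ntrees; float threshold; };

// Returns 1 if every stage passes, otherwise -(index of the failing stage).
template<typename FeatureFn>
int predictOrderedSketch( const std::vector<Stage>& stages,
                          const std::vector<DTree>& weaks,
                          const std::vector<DTreeNode>& nodes,
                          const std::vector<float>& leaves,
                          FeatureFn feature )
{
    int nodeOfs = 0, leafOfs = 0;
    for( size_t si = 0; si < stages.size(); si++ )
    {
        const Stage& stage = stages[si];
        double sum = 0;
        for( int wi = 0; wi < stage.ntrees; wi++ )
        {
            const DTree& weak = weaks[stage.first + wi];
            int idx = 0, root = nodeOfs;
            do
            {
                const DTreeNode& node = nodes[root + idx];
                double val = feature(node.featureIdx);
                idx = val < node.threshold ? node.left : node.right;
            }
            while( idx > 0 );               // idx <= 0 encodes a leaf: -idx is the leaf slot
            sum += leaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;  // a tree with N split nodes has N+1 leaves
        }
        if( sum < stage.threshold )
            return -(int)si;
    }
    return 1;
}

static double toyFeature( int ) { return 0.8; }

int main()
{
    // One stage, one stump: pass when the feature value is >= 0.5.
    std::vector<Stage>     stages(1); stages[0].first = 0; stages[0].ntrees = 1; stages[0].threshold = 0.f;
    std::vector<DTree>     weaks(1);  weaks[0].nodeCount = 1;
    std::vector<DTreeNode> nodes(1);  nodes[0].featureIdx = 0; nodes[0].threshold = 0.5f;
                                      nodes[0].left = 0; nodes[0].right = -1;
    std::vector<float>     leaves(2); leaves[0] = -1.f; leaves[1] = +1.f;

    printf("%d\n", predictOrderedSketch(stages, weaks, nodes, leaves, toyFeature));  // prints 1
    return 0;
}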
node.left : node.right; } while( idx > 0 ); @@ -674,32 +676,32 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr &_f } template -inline int predictCategorical( CascadeClassifier& cascade, Ptr &_feval ) +inline int predictCategorical( CascadeClassifier& cascade, Ptr &_featureEvaluator ) { - int si, nstages = (int)cascade.stages.size(); + int nstages = (int)cascade.data.stages.size(); int nodeOfs = 0, leafOfs = 0; - FEval& feval = (FEval&)*_feval; - size_t subsetSize = (cascade.ncategories + 31)/32; - int* cascadeSubsets = &cascade.subsets[0]; - float* cascadeLeaves = &cascade.leaves[0]; - CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0]; - CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0]; - CascadeClassifier::Stage* cascadeStages = &cascade.stages[0]; + FEval& featureEvaluator = (FEval&)*_featureEvaluator; + size_t subsetSize = (cascade.data.ncategories + 31)/32; + int* cascadeSubsets = &cascade.data.subsets[0]; + float* cascadeLeaves = &cascade.data.leaves[0]; + CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; + CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0]; + CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; - for( si = 0; si < nstages; si++ ) + for(int si = 0; si < nstages; si++ ) { - CascadeClassifier::Stage& stage = cascadeStages[si]; + CascadeClassifier::Data::Stage& stage = cascadeStages[si]; int wi, ntrees = stage.ntrees; double sum = 0; for( wi = 0; wi < ntrees; wi++ ) { - CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi]; + CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi]; int idx = 0, root = nodeOfs; do { - CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx]; - int c = feval(node.featureIdx); + CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx]; + int c = featureEvaluator(node.featureIdx); const int* subset = &cascadeSubsets[(root + idx)*subsetSize]; idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right; } @@ -715,25 +717,25 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr } template -inline int predictOrderedStump( CascadeClassifier& cascade, Ptr &_feval ) +inline int predictOrderedStump( CascadeClassifier& cascade, Ptr &_featureEvaluator ) { int nodeOfs = 0, leafOfs = 0; - FEval& feval = (FEval&)*_feval; - float* cascadeLeaves = &cascade.leaves[0]; - CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0]; - CascadeClassifier::Stage* cascadeStages = &cascade.stages[0]; + FEval& featureEvaluator = (FEval&)*_featureEvaluator; + float* cascadeLeaves = &cascade.data.leaves[0]; + CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; + CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; - int nstages = (int)cascade.stages.size(); + int nstages = (int)cascade.data.stages.size(); for( int stageIdx = 0; stageIdx < nstages; stageIdx++ ) { - CascadeClassifier::Stage& stage = cascadeStages[stageIdx]; + CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx]; double sum = 0.0; int ntrees = stage.ntrees; for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 ) { - CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs]; - double value = feval(node.featureIdx); + CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs]; + double value = featureEvaluator(node.featureIdx); sum += cascadeLeaves[ value < node.threshold ? 
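predictCategorical here (and predictCategoricalStump further down) branches on categorical LBP features: the feature code c indexes a bit in the node's subset mask, and that bit chooses the left or right branch. A small sketch of the bit test, using the same 32-categories-per-int layout as the code above, follows.

// Sketch of the categorical split used by predictCategorical and
// predictCategoricalStump: an LBP code c in [0, ncategories) selects a bit in
// the per-node subset mask, and the bit decides the branch.
#include <vector>
#include <cstdio>

static bool goesLeft( const std::vector<int>& subset, int c )
{
    // subset stores (ncategories + 31)/32 ints per node;
    // bit (c & 31) of word (c >> 5) marks category c as a "left" category.
    return ( subset[c >> 5] & (1 << (c & 31)) ) != 0;
}

int main()
{
    int ncategories = 256;                        // LBP codes are 8-bit
    std::vector<int> subset((ncategories + 31) / 32, 0);

    // Mark categories 3 and 200 as "left".
    subset[3 >> 5]   |= 1 << (3 & 31);
    subset[200 >> 5] |= 1 << (200 & 31);

    printf("%d %d %d\n", (int)goesLeft(subset, 3),
                         (int)goesLeft(subset, 200),
                         (int)goesLeft(subset, 7));   // prints 1 1 0
    return 0;
}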
leafOfs : leafOfs + 1 ]; } @@ -745,27 +747,27 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr -inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr &_feval ) +inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr &_featureEvaluator ) { - int si, nstages = (int)cascade.stages.size(); + int nstages = (int)cascade.data.stages.size(); int nodeOfs = 0, leafOfs = 0; - FEval& feval = (FEval&)*_feval; - size_t subsetSize = (cascade.ncategories + 31)/32; - int* cascadeSubsets = &cascade.subsets[0]; - float* cascadeLeaves = &cascade.leaves[0]; - CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0]; - CascadeClassifier::Stage* cascadeStages = &cascade.stages[0]; - - for( si = 0; si < nstages; si++ ) + FEval& featureEvaluator = (FEval&)*_featureEvaluator; + size_t subsetSize = (cascade.data.ncategories + 31)/32; + int* cascadeSubsets = &cascade.data.subsets[0]; + float* cascadeLeaves = &cascade.data.leaves[0]; + CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; + CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; + + for( int si = 0; si < nstages; si++ ) { - CascadeClassifier::Stage& stage = cascadeStages[si]; + CascadeClassifier::Data::Stage& stage = cascadeStages[si]; int wi, ntrees = stage.ntrees; double sum = 0; for( wi = 0; wi < ntrees; wi++ ) { - CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs]; - int c = feval(node.featureIdx); + CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs]; + int c = featureEvaluator(node.featureIdx); const int* subset = &cascadeSubsets[nodeOfs*subsetSize]; sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1]; nodeOfs++; @@ -780,43 +782,30 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr& featureEvaluator, Point pt ) { CV_Assert( oldCascade.empty() ); - /*if( !oldCascade.empty() ) - return cvRunHaarClassifierCascade(oldCascade, pt, 0);*/ - assert(featureType == FeatureEvaluator::HAAR || - featureType == FeatureEvaluator::LBP); + assert(data.featureType == FeatureEvaluator::HAAR || + data.featureType == FeatureEvaluator::LBP); return !featureEvaluator->setWindow(pt) ? -1 : - isStumpBased ? ( featureType == FeatureEvaluator::HAAR ? + data.isStumpBased ? ( data.featureType == FeatureEvaluator::HAAR ? predictOrderedStump( *this, featureEvaluator ) : predictCategoricalStump( *this, featureEvaluator ) ) : - ( featureType == FeatureEvaluator::HAAR ? + ( data.featureType == FeatureEvaluator::HAAR ? predictOrdered( *this, featureEvaluator ) : predictCategorical( *this, featureEvaluator ) ); } bool CascadeClassifier::setImage( Ptr& featureEvaluator, const Mat& image ) { - /*if( !oldCascade.empty() ) - { - Mat sum(image.rows+1, image.cols+1, CV_32S); - Mat tilted(image.rows+1, image.cols+1, CV_32S); - Mat sqsum(image.rows+1, image.cols+1, CV_64F); - integral(image, sum, sqsum, tilted); - CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted; - cvSetImagesForHaarClassifierCascade( oldCascade, &_sum, &_sqsum, &_tilted, 1. ); - return true; - }*/ - return empty() ? false : featureEvaluator->setImage(image, origWinSize); + return empty() ? 
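The rewritten runAt() in this hunk first positions the feature evaluator on a window and then dispatches to one of the four predict* templates based on data.featureType and data.isStumpBased. The standalone sketch below mirrors that 2x2 dispatch; the evaluator and the predictors are stubs, not the real templates.

// Standalone sketch of the runAt() dispatch: position the evaluator first,
// then pick a specialization by feature type (Haar = ordered, LBP =
// categorical) and by whether every weak classifier is a depth-1 stump.
#include <cstdio>

enum FeatureType { HAAR = 0, LBP = 1 };

struct EvaluatorStub
{
    bool setWindow( int x, int y ) { return x >= 0 && y >= 0; }  // stand-in bounds check
};

static int predictOrderedStub( EvaluatorStub& )          { return 1; }
static int predictCategoricalStub( EvaluatorStub& )      { return 1; }
static int predictOrderedStumpStub( EvaluatorStub& )     { return 1; }
static int predictCategoricalStumpStub( EvaluatorStub& ) { return 1; }

static int runAtSketch( EvaluatorStub& eval, int featureType, bool isStumpBased, int x, int y )
{
    if( !eval.setWindow(x, y) )
        return -1;                                   // window does not fit the image
    return isStumpBased
        ? ( featureType == HAAR ? predictOrderedStumpStub(eval) : predictCategoricalStumpStub(eval) )
        : ( featureType == HAAR ? predictOrderedStub(eval)      : predictCategoricalStub(eval) );
}

int main()
{
    EvaluatorStub eval;
    printf("%d %d\n", runAtSketch(eval, HAAR, true, 10, 10),
                      runAtSketch(eval, LBP,  true, -1, 0));     // prints 1 -1
    return 0;
}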
false : featureEvaluator->setImage(image, data.origWinSize); } - struct CascadeClassifierInvoker { CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor, ConcurrentRectVector& _vec ) { classifier = &_cc; - processingAreaSize = _sz1; + processingRectSize = _sz1; stripSize = _stripSize; yStep = _yStep; scalingFactor = _factor; @@ -825,19 +814,19 @@ struct CascadeClassifierInvoker void operator()(const BlockedRange& range) const { - Ptr evaluator = classifier->feval->clone(); - Size winSize(cvRound(classifier->origWinSize.width * scalingFactor), cvRound(classifier->origWinSize.height * scalingFactor)); + Ptr evaluator = classifier->featureEvaluator->clone(); + Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor)); int y1 = range.begin() * stripSize; - int y2 = min(range.end() * stripSize, processingAreaSize.height); + int y2 = min(range.end() * stripSize, processingRectSize.height); for( int y = y1; y < y2; y += yStep ) { - for( int x = 0; x < processingAreaSize.width; x += yStep ) + for( int x = 0; x < processingRectSize.width; x += yStep ) { int result = classifier->runAt(evaluator, Point(x, y)); if( result > 0 ) rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor), - winSize.width, winSize.height)); + winSize.width, winSize.height)); if( result == 0 ) x += yStep; } @@ -846,14 +835,46 @@ struct CascadeClassifierInvoker CascadeClassifier* classifier; ConcurrentRectVector* rectangles; - Size processingAreaSize; + Size processingRectSize; int stripSize, yStep; double scalingFactor; }; - struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } }; +bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize, + int stripSize, int yStep, double factor, vector& candidates ) +{ + if( !featureEvaluator->setImage( image, data.origWinSize ) ) + return false; + + ConcurrentRectVector concurrentCandidates; + parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor, concurrentCandidates)); + candidates.insert( candidates.end(), concurrentCandidates.begin(), concurrentCandidates.end() ); + + return true; +} + +bool CascadeClassifier::isOldFormatCascade() const +{ + return !oldCascade.empty(); +} + +int CascadeClassifier::getFeatureType() const +{ + return featureEvaluator->getFeatureType(); +} + +Size CascadeClassifier::getOriginalWindowSize() const +{ + return data.origWinSize; +} + +bool CascadeClassifier::setImage(const Mat& image) +{ + featureEvaluator->setImage(image, data.origWinSize); +} + void CascadeClassifier::detectMultiScale( const Mat& image, vector& objects, double scaleFactor, int minNeighbors, int flags, Size minObjectSize, Size maxObjectSize ) @@ -865,7 +886,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector& object if( empty() ) return; - if( !oldCascade.empty() ) + if( isOldFormatCascade() ) { MemStorage storage(cvCreateMemStorage(0)); CvMat _image = image; @@ -892,51 +913,50 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector& object } Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U); - ConcurrentRectVector candidates; + vector candidates; for( double factor = 1; ; factor *= scaleFactor ) { - int stripCount, stripSize; + Size originalWindowSize = getOriginalWindowSize(); - Size windowSize( cvRound(origWinSize.width*factor), cvRound(origWinSize.height*factor) ); + 
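detectSingleScale() and CascadeClassifierInvoker in this hunk split the processing rectangle into horizontal strips, scan each strip with a yStep that depends on the scale factor, and convert every accepted position back to original-image coordinates. The sketch below reproduces that per-strip scan sequentially, with a stand-in predicate in place of runAt(); the parallel_for/BlockedRange wiring used by the patch is omitted.

// Sequential sketch of the per-strip scan set up by CascadeClassifierInvoker:
// a strip index maps to a row range, positions are visited with yStep, and
// accepted windows are scaled back by `factor` into original coordinates.
#include <vector>
#include <algorithm>
#include <cstdio>

struct Rect_ { int x, y, width, height; };

static int roundPos( double v ) { return (int)(v + 0.5); }   // cvRound stand-in for v >= 0

template<typename ClassifyFn>
void scanStrip( int stripIdx, int stripSize, int yStep, double factor,
                int procWidth, int procHeight, int winW, int winH,
                ClassifyFn classifyAt, std::vector<Rect_>& out )
{
    int y1 = stripIdx * stripSize;
    int y2 = std::min((stripIdx + 1) * stripSize, procHeight);
    for( int y = y1; y < y2; y += yStep )
        for( int x = 0; x < procWidth; x += yStep )
        {
            int result = classifyAt(x, y);   // 1 = object, 0 = rejected by the first stage
            if( result > 0 )
            {
                Rect_ r = { roundPos(x*factor), roundPos(y*factor),
                            roundPos(winW*factor), roundPos(winH*factor) };
                out.push_back(r);
            }
            if( result == 0 )                // first-stage reject: skip the neighbouring column
                x += yStep;
        }
}

// A trivial predicate: "detect" a window whenever x == y.
static int toyClassifier( int x, int y ) { return x == y ? 1 : 0; }

int main()
{
    std::vector<Rect_> hits;
    for( int strip = 0; strip < 4; strip++ )
        scanStrip(strip, 16, 2, 1.5, 64, 64, 24, 24, toyClassifier, hits);
    printf("%d candidate rectangles\n", (int)hits.size());
    return 0;
}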
Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) ); Size scaledImageSize( cvRound( grayImage.cols/factor ), cvRound( grayImage.rows/factor ) ); - Size processingAreaSize( scaledImageSize.width - origWinSize.width, scaledImageSize.height - origWinSize.height ); + Size processingRectSize( scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height ); - if( processingAreaSize.width <= 0 || processingAreaSize.height <= 0 ) + if( processingRectSize.width <= 0 || processingRectSize.height <= 0 ) break; if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height ) break; if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height ) continue; + Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data ); + resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR ); + int yStep = factor > 2. ? 1 : 2; + int stripCount, stripSize; #ifdef HAVE_TBB const int PTS_PER_THREAD = 1000; - stripCount = ((processingAreaSize.width/yStep)*(processingAreaSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD; + stripCount = ((processingRectSize.width/yStep)*(processingRectSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD; stripCount = std::min(std::max(stripCount, 1), 100); - stripSize = (((processingAreaSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep; + stripSize = (((processingRectSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep; #else stripCount = 1; - stripSize = processingAreaSize.height; + stripSize = processingRectSize.height; #endif - Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data ); - resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR ); - if( !feval->setImage( scaledImage, origWinSize ) ) + if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates ) ) break; - - parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker(*this, processingAreaSize, stripSize, yStep, factor, candidates)); } - + objects.resize(candidates.size()); std::copy(candidates.begin(), candidates.end(), objects.begin()); groupRectangles( objects, minNeighbors, GROUP_EPS ); } - -bool CascadeClassifier::read(const FileNode& root) +bool CascadeClassifier::Data::read(const FileNode &root) { // load stage params string stageTypeStr = (string)root[CC_STAGE_TYPE]; @@ -944,7 +964,7 @@ bool CascadeClassifier::read(const FileNode& root) stageType = BOOST; else return false; - + string featureTypeStr = (string)root[CC_FEATURE_TYPE]; if( featureTypeStr == CC_HAAR ) featureType = FeatureEvaluator::HAAR; @@ -952,33 +972,33 @@ bool CascadeClassifier::read(const FileNode& root) featureType = FeatureEvaluator::LBP; else return false; - + origWinSize.width = (int)root[CC_WIDTH]; origWinSize.height = (int)root[CC_HEIGHT]; CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 ); - + isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1 ? true : false; // load feature params FileNode fn = root[CC_FEATURE_PARAMS]; if( fn.empty() ) return false; - + ncategories = fn[CC_MAX_CAT_COUNT]; int subsetSize = (ncategories + 31)/32, nodeStep = 3 + ( ncategories>0 ? 
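The scale loop in detectMultiScale above shrinks the image by `factor` while the detection window keeps its original size, which is equivalent to scanning the original image with a window of size origWin*factor. The sketch below restates that bookkeeping with the same stop/skip conditions and the same yStep rule; the concrete sizes are made-up inputs.

// Sketch of the scale-loop bookkeeping in detectMultiScale: stop when the
// shrunk image no longer fits one window or the effective window exceeds
// maxObjectSize, and skip scales whose window is below minObjectSize.
#include <cstdio>

struct Sz { int width, height; };

int main()
{
    const Sz imageSize = { 640, 480 };
    const Sz origWin   = { 24, 24 };
    const Sz minObject = { 30, 30 };
    const Sz maxObject = { 400, 400 };
    const double scaleStep = 1.1;

    for( double factor = 1.0; ; factor *= scaleStep )
    {
        Sz windowSize      = { (int)(origWin.width*factor + 0.5),   (int)(origWin.height*factor + 0.5) };
        Sz scaledImageSize = { (int)(imageSize.width/factor + 0.5), (int)(imageSize.height/factor + 0.5) };
        Sz processingRect  = { scaledImageSize.width  - origWin.width,
                               scaledImageSize.height - origWin.height };

        if( processingRect.width <= 0 || processingRect.height <= 0 )
            break;      // the window no longer fits the shrunk image
        if( windowSize.width > maxObject.width || windowSize.height > maxObject.height )
            break;      // effective window already larger than the allowed maximum
        if( windowSize.width < minObject.width || windowSize.height < minObject.height )
            continue;   // too small to be interesting at this scale

        int yStep = factor > 2. ? 1 : 2;   // scan densely only once the image is heavily shrunk
        printf("factor %.2f: scan %dx%d positions, step %d, reports %dx%d boxes\n",
               factor, processingRect.width, processingRect.height, yStep,
               windowSize.width, windowSize.height);
    }
    return 0;
}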
subsetSize : 1 ); - + // load stages fn = root[CC_STAGES]; if( fn.empty() ) return false; - + stages.reserve(fn.size()); classifiers.clear(); nodes.clear(); - + FileNodeIterator it = fn.begin(), it_end = fn.end(); - + for( int si = 0; it != it_end; si++, ++it ) { FileNode fns = *it; @@ -991,7 +1011,7 @@ bool CascadeClassifier::read(const FileNode& root) stage.first = (int)classifiers.size(); stages.push_back(stage); classifiers.reserve(stages[si].first + stages[si].ntrees); - + FileNodeIterator it1 = fns.begin(), it1_end = fns.end(); for( ; it1 != it1_end; ++it1 ) // weak trees { @@ -1000,56 +1020,62 @@ bool CascadeClassifier::read(const FileNode& root) FileNode leafValues = fnw[CC_LEAF_VALUES]; if( internalNodes.empty() || leafValues.empty() ) return false; + DTree tree; tree.nodeCount = (int)internalNodes.size()/nodeStep; classifiers.push_back(tree); - + nodes.reserve(nodes.size() + tree.nodeCount); leaves.reserve(leaves.size() + leafValues.size()); if( subsetSize > 0 ) subsets.reserve(subsets.size() + tree.nodeCount*subsetSize); - - FileNodeIterator it2 = internalNodes.begin(), it2_end = internalNodes.end(); - - for( ; it2 != it2_end; ) // nodes + + FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end(); + + for( ; internalNodesIter != internalNodesEnd; ) // nodes { DTreeNode node; - node.left = (int)*it2; ++it2; - node.right = (int)*it2; ++it2; - node.featureIdx = (int)*it2; ++it2; + node.left = (int)*internalNodesIter; ++internalNodesIter; + node.right = (int)*internalNodesIter; ++internalNodesIter; + node.featureIdx = (int)*internalNodesIter; ++internalNodesIter; if( subsetSize > 0 ) { - for( int j = 0; j < subsetSize; j++, ++it2 ) - subsets.push_back((int)*it2); + for( int j = 0; j < subsetSize; j++, ++internalNodesIter ) + subsets.push_back((int)*internalNodesIter); node.threshold = 0.f; } else { - node.threshold = (float)*it2; ++it2; + node.threshold = (float)*internalNodesIter; ++internalNodesIter; } nodes.push_back(node); } - - it2 = leafValues.begin(), it2_end = leafValues.end(); - - for( ; it2 != it2_end; ++it2 ) // leaves - leaves.push_back((float)*it2); + + internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end(); + + for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves + leaves.push_back((float)*internalNodesIter); } } + return true; +} + +bool CascadeClassifier::read(const FileNode& root) +{ + if( !data.read(root) ) + return false; + // load features - feval = FeatureEvaluator::create(featureType); - fn = root[CC_FEATURES]; + featureEvaluator = FeatureEvaluator::create(data.featureType); + FileNode fn = root[CC_FEATURES]; if( fn.empty() ) return false; - return feval->read(fn); + return featureEvaluator->read(fn); } template<> void Ptr::delete_obj() { cvReleaseHaarClassifierCascade(&obj); } } // namespace cv - -/* End of file. 
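CascadeClassifier::read() in this hunk now delegates the stage and tree tables to Data::read() and then creates the matching FeatureEvaluator for the feature list. Since read(FileNode) stays public, it can be driven directly from a FileStorage, as sketched below; this assumes a new-format cascade stored as the first top-level node of the file, and the file name is a placeholder.

// Sketch of calling the split read path directly from a FileStorage.
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/core/core.hpp>
#include <cstdio>

int main()
{
    cv::FileStorage fs("lbpcascade_frontalface.xml", cv::FileStorage::READ);
    if( !fs.isOpened() )
        return -1;

    cv::CascadeClassifier cascade;
    if( !cascade.read(fs.getFirstTopLevelNode()) )   // Data::read + feature list
    {
        printf("not a new-format cascade\n");
        return -1;
    }
    printf("window %dx%d, feature type %d\n",
           cascade.getOriginalWindowSize().width,
           cascade.getOriginalWindowSize().height,
           cascade.getFeatureType());
    return 0;
}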
*/ - diff --git a/modules/traincascade/boost.cpp b/modules/traincascade/boost.cpp index afab53f21a876c1ca0464aa652338d522e97b0d4..363d30c5ca1b993cb1f4e20616d305da69812ab7 100644 --- a/modules/traincascade/boost.cpp +++ b/modules/traincascade/boost.cpp @@ -474,9 +474,9 @@ float CvCascadeBoostTrainData::getVarValue( int vi, int si ) struct FeatureIdxOnlyPrecalc { - FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, int _sample_count, bool _is_buf_16u ) + FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, int _sample_count, bool _is_buf_16u ) { - feval = _feval; + featureEvaluator = _featureEvaluator; sample_count = _sample_count; udst = (unsigned short*)_buf->data.s; idst = _buf->data.i; @@ -490,7 +490,7 @@ struct FeatureIdxOnlyPrecalc { for( int si = 0; si < sample_count; si++ ) { - valCachePtr[si] = (*feval)( fi, si ); + valCachePtr[si] = (*featureEvaluator)( fi, si ); if ( is_buf_16u ) *(udst + fi*sample_count + si) = (unsigned short)si; else @@ -502,7 +502,7 @@ struct FeatureIdxOnlyPrecalc icvSortIntAux( idst + fi*sample_count, sample_count, valCachePtr ); } } - const CvFeatureEvaluator* feval; + const CvFeatureEvaluator* featureEvaluator; int sample_count; int* idst; unsigned short* udst; @@ -511,9 +511,9 @@ struct FeatureIdxOnlyPrecalc struct FeatureValAndIdxPrecalc { - FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u ) + FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u ) { - feval = _feval; + featureEvaluator = _featureEvaluator; valCache = _valCache; sample_count = _sample_count; udst = (unsigned short*)_buf->data.s; @@ -526,7 +526,7 @@ struct FeatureValAndIdxPrecalc { for( int si = 0; si < sample_count; si++ ) { - valCache->at(fi,si) = (*feval)( fi, si ); + valCache->at(fi,si) = (*featureEvaluator)( fi, si ); if ( is_buf_16u ) *(udst + fi*sample_count + si) = (unsigned short)si; else @@ -538,7 +538,7 @@ struct FeatureValAndIdxPrecalc icvSortIntAux( idst + fi*sample_count, sample_count, valCache->ptr(fi) ); } } - const CvFeatureEvaluator* feval; + const CvFeatureEvaluator* featureEvaluator; Mat* valCache; int sample_count; int* idst; @@ -548,9 +548,9 @@ struct FeatureValAndIdxPrecalc struct FeatureValOnlyPrecalc { - FeatureValOnlyPrecalc( const CvFeatureEvaluator* _feval, Mat* _valCache, int _sample_count ) + FeatureValOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, Mat* _valCache, int _sample_count ) { - feval = _feval; + featureEvaluator = _featureEvaluator; valCache = _valCache; sample_count = _sample_count; } @@ -558,9 +558,9 @@ struct FeatureValOnlyPrecalc { for ( int fi = range.begin(); fi < range.end(); fi++) for( int si = 0; si < sample_count; si++ ) - valCache->at(fi,si) = (*feval)( fi, si ); + valCache->at(fi,si) = (*featureEvaluator)( fi, si ); } - const CvFeatureEvaluator* feval; + const CvFeatureEvaluator* featureEvaluator; Mat* valCache; int sample_count; };
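The three traincascade functors above share one pattern: over a (parallelizable) range of feature indices, evaluate every sample, cache the value, and keep a per-feature array of sample indices sorted by value. The sketch below restates that pattern standalone, with std::sort standing in for the internal icvSortIntAux helper and a toy feature response in place of the CvFeatureEvaluator.

// Standalone sketch of the feature-value precalculation pattern.
#include <vector>
#include <algorithm>
#include <cstdio>

struct ValueOrder
{
    const float* vals;
    explicit ValueOrder( const float* v ) : vals(v) {}
    bool operator()( int a, int b ) const { return vals[a] < vals[b]; }
};

struct FeaturePrecalcSketch
{
    int sampleCount;
    std::vector<float>* valCache;   // featureCount x sampleCount, row-major
    std::vector<int>*   sortedIdx;  // same shape: sample indices ordered by value

    void operator()( int fiBegin, int fiEnd ) const   // the "range" of features
    {
        for( int fi = fiBegin; fi < fiEnd; fi++ )
        {
            float* vals = &(*valCache)[fi*sampleCount];
            int*   idx  = &(*sortedIdx)[fi*sampleCount];
            for( int si = 0; si < sampleCount; si++ )
            {
                vals[si] = (float)((fi + 1) * (sampleCount - si));  // toy feature response
                idx[si] = si;
            }
            std::sort(idx, idx + sampleCount, ValueOrder(vals));
        }
    }
};

int main()
{
    int featureCount = 4, sampleCount = 8;
    std::vector<float> valCache(featureCount*sampleCount);
    std::vector<int>   sortedIdx(featureCount*sampleCount);

    FeaturePrecalcSketch body = { sampleCount, &valCache, &sortedIdx };
    body(0, featureCount);                       // a parallel_for would split this range

    printf("feature 0: smallest value at sample %d\n", sortedIdx[0]);  // prints 7
    return 0;
}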