提交 ef966e15 编写于 作者：Alexey Spizhevoy

updated focal estimation (opencv_stitching) + refactoring

上级 5c5cd449
......@@ -76,35 +76,46 @@ void focalsFromHomography(const Mat& H, double &f0, double &f1, bool &f0_ok, boo
}
double estimateFocal(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches)
void estimateFocal(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches,
vector<double> &focals)
{
const int num_images = static_cast<int>(features.size());
focals.resize(num_images);
vector<double> focals;
for (int src_idx = 0; src_idx < num_images; ++src_idx)
{
for (int dst_idx = 0; dst_idx < num_images; ++dst_idx)
vector<vector<double> > all_focals(num_images);
for (int i = 0; i < num_images; ++i)
{
for (int j = 0; j < num_images; ++j)
{
const MatchesInfo &m = pairwise_matches[src_idx*num_images + dst_idx];
const MatchesInfo &m = pairwise_matches[i*num_images + j];
if (m.H.empty())
continue;
double f0, f1;
bool f0ok, f1ok;
focalsFromHomography(m.H, f0, f1, f0ok, f1ok);
if (f0ok && f1ok) focals.push_back(sqrt(f0*f1));
if (f0ok && f1ok)
{
all_focals[i].push_back(f0);
all_focals[j].push_back(f1);
}
}
}
if (static_cast<int>(focals.size()) >= num_images * (num_images - 1) / 2)
for (int i = 0; i < num_images; ++i)
{
nth_element(focals.begin(), focals.end(), focals.begin() + focals.size()/2);
return focals[focals.size()/2];
}
if (all_focals[i].size() > 0)
{
nth_element(all_focals[i].begin(), all_focals[i].end(),
all_focals[i].begin() + all_focals[i].size()/2);
focals[i] = all_focals[i][all_focals[i].size()/2];
}
else
{
LOGLN("Can't estimate focal length #" << i << ", will use naive approach");
focals[i] = features[i].img_size.width + features[i].img_size.height;
}
LOGLN("Can't estimate focal length, will use naive approach");
double focals_sum = 0;
for (int i = 0; i < num_images; ++i)
focals_sum += features[i].img_size.width + features[i].img_size.height;
return focals_sum / num_images;
}
}
......@@ -49,6 +49,7 @@
// by Heung-Yeung Shum and Richard Szeliski.
void focalsFromHomography(const cv::Mat &H, double &f0, double &f1, bool &f0_ok, bool &f1_ok);
double estimateFocal(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches);
void estimateFocal(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches,
std::vector<double> &focals);
#endif // __OPENCV_AUTOCALIB_HPP__
......@@ -185,6 +185,11 @@ void MultiBandBlender::feed(const Mat &img, const Mat &mask, Point tl)
CV_Assert(img.type() == CV_16SC3);
CV_Assert(mask.type() == CV_8U);
//int gap = 10 * (1 << num_bands_);
//Point tl_new(max(dst_roi_.x, tl.x - gap),
// max(dst_roi_.y, tl.y - gap));
//Point br_new(min(dst_roi_.br().x, tl.x + img.cols + gap),
// min(dst_roi_.br().y, tl.y + img.rows + gap));
Point tl_new(dst_roi_.tl());
Point br_new(dst_roi_.br());
int top = tl.y - tl_new.y;
......@@ -215,8 +220,10 @@ void MultiBandBlender::feed(const Mat &img, const Mat &mask, Point tl)
// Add weighted layer of the source image to the final Laplacian pyramid layer
for (int i = 0; i <= num_bands_; ++i)
{
int dx = 0;//(tl_new.x >> i) - (dst_roi_.x >> i);
int dy = 0;//(tl_new.y >> i) - (dst_roi_.y >> i);
int dx = 0;
int dy = 0;
//int dx = (tl_new.x >> i) - (dst_roi_.x >> i);
//int dy = (tl_new.y >> i) - (dst_roi_.y >> i);
for (int y = 0; y < src_pyr_laplace[i].rows; ++y)
{
......
......@@ -288,8 +288,11 @@ void FeaturesMatcher::operator ()(const vector<ImageFeatures> &features, vector<
//////////////////////////////////////////////////////////////////////////////
namespace
namespace
{
// These two classes are aimed to find features matches only, not to
// estimate homography
class CpuMatcher : public FeaturesMatcher
{
public:
......@@ -300,10 +303,24 @@ namespace
float match_conf_;
};
// GPU-accelerated feature matcher (finds descriptor matches only, not the
// homography). Descriptor and knnMatch result buffers are kept as members so
// repeated match() calls can reuse the same GPU allocations.
class GpuMatcher : public FeaturesMatcher
{
public:
    GpuMatcher(float match_conf) : match_conf_(match_conf) {}

    // Fills matches_info with matches between the two feature sets
    void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info);

private:
    float match_conf_;                        // match confidence threshold used by match()
    GpuMat descriptors1_;                     // device copy of features1 descriptors
    GpuMat descriptors2_;                     // device copy of features2 descriptors
    GpuMat train_idx_, distance_, all_dist_;  // knnMatch output buffers
};
void CpuMatcher::match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
{
matches_info.matches.clear();
BruteForceMatcher< L2<float> > matcher;
vector< vector<DMatch> > pair_matches;
......@@ -332,34 +349,19 @@ namespace
matches_info.matches.push_back(DMatch(m0.trainIdx, m0.queryIdx, m0.distance));
}
}
class GpuMatcher : public FeaturesMatcher
{
public:
GpuMatcher(float match_conf) : match_conf_(match_conf) {}
void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info);
private:
float match_conf_;
GpuMat descriptors1_;
GpuMat descriptors2_;
GpuMat trainIdx_, distance_, allDist_;
};
void GpuMatcher::match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
{
matches_info.matches.clear();
BruteForceMatcher_GPU< L2<float> > matcher;
matches_info.matches.clear();
descriptors1_.upload(features1.descriptors);
descriptors2_.upload(features2.descriptors);
BruteForceMatcher_GPU< L2<float> > matcher;
vector< vector<DMatch> > pair_matches;
// Find 1->2 matches
matcher.knnMatch(descriptors1_, descriptors2_, trainIdx_, distance_, allDist_, 2);
matcher.knnMatchDownload(trainIdx_, distance_, pair_matches);
matcher.knnMatch(descriptors1_, descriptors2_, train_idx_, distance_, all_dist_, 2);
matcher.knnMatchDownload(train_idx_, distance_, pair_matches);
for (size_t i = 0; i < pair_matches.size(); ++i)
{
if (pair_matches[i].size() < 2)
......@@ -376,8 +378,8 @@ namespace
// Find 2->1 matches
pair_matches.clear();
matcher.knnMatch(descriptors2_, descriptors1_, trainIdx_, distance_, allDist_, 2);
matcher.knnMatchDownload(trainIdx_, distance_, pair_matches);
matcher.knnMatch(descriptors2_, descriptors1_, train_idx_, distance_, all_dist_, 2);
matcher.knnMatchDownload(train_idx_, distance_, pair_matches);
for (size_t i = 0; i < pair_matches.size(); ++i)
{
if (pair_matches[i].size() < 2)
......@@ -392,7 +394,9 @@ namespace
matches_info.matches.push_back(DMatch(m0.trainIdx, m0.queryIdx, m0.distance));
}
}
}
} // anonymous namespace
BestOf2NearestMatcher::BestOf2NearestMatcher(bool try_use_gpu, float match_conf, int num_matches_thresh1, int num_matches_thresh2)
{
......
......@@ -97,16 +97,18 @@ class FeaturesMatcher
{
public:
virtual ~FeaturesMatcher() {}
void operator ()(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info)
{
match(features1, features2, matches_info);
}
void operator ()(const ImageFeatures &features1, const ImageFeatures &features2,
MatchesInfo& matches_info) { match(features1, features2, matches_info); }
void operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches);
bool isThreadSafe() const { return is_thread_safe_; }
protected:
FeaturesMatcher(bool is_thread_safe = false) : is_thread_safe_(is_thread_safe) {}
virtual void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo& matches_info) = 0;
virtual void match(const ImageFeatures &features1, const ImageFeatures &features2,
MatchesInfo& matches_info) = 0;
bool is_thread_safe_;
};
......@@ -115,14 +117,14 @@ protected:
// Matcher that accepts a correspondence based on its two nearest candidates
// (k=2 knn matching with a confidence threshold).
class BestOf2NearestMatcher : public FeaturesMatcher
{
public:
    BestOf2NearestMatcher(bool try_use_gpu = true, float match_conf = 0.55f, int num_matches_thresh1 = 6,
                          int num_matches_thresh2 = 6);

protected:
    void match(const ImageFeatures &features1, const ImageFeatures &features2, MatchesInfo &matches_info);

    int num_matches_thresh1_;
    int num_matches_thresh2_;
    // NOTE(review): presumably the CPU or GPU matcher selected via try_use_gpu — confirm in the constructor
    cv::Ptr<FeaturesMatcher> impl_;
};
......
......@@ -109,10 +109,11 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
const int num_images = static_cast<int>(features.size());
// Estimate focal length and set it for all cameras
double focal = estimateFocal(features, pairwise_matches);
vector<double> focals;
estimateFocal(features, pairwise_matches, focals);
cameras.resize(num_images);
for (int i = 0; i < num_images; ++i)
cameras[i].focal = focal;
cameras[i].focal = focals[i];
// Restore global motion
Graph span_tree;
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请注册