Commit 710506e9 authored by Vladislav Sovrasov

calib3d: add a new overload for recoverPose

Parent dcbed8d6
@@ -1433,6 +1433,28 @@ CV_EXPORTS_W int recoverPose( InputArray E, InputArray points1, InputArray point
double focal = 1.0, Point2d pp = Point2d(0, 0),
InputOutputArray mask = noArray() );
/** @overload
@param E The input essential matrix.
@param points1 Array of N 2D points from the first image. The point coordinates should be
floating-point (single or double precision).
@param points2 Array of the second image points of the same size and format as points1.
@param cameraMatrix Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
Note that this function assumes that points1 and points2 are feature points from cameras with the
same camera matrix.
@param R Recovered relative rotation.
@param t Recovered relative translation.
@param distanceThresh Threshold distance which is used to filter out far away points (i.e. infinite points).
@param mask Input/output mask for inliers in points1 and points2. If it is not empty, then it
marks inliers in points1 and points2 for the given essential matrix E. Only these inliers will
be used to recover the pose, and in the output mask only the inliers that pass the cheirality
check are kept.
@param triangulatedPoints 3D points which were reconstructed by triangulation.
*/
CV_EXPORTS_W int recoverPose( InputArray E, InputArray points1, InputArray points2,
InputArray cameraMatrix, OutputArray R, OutputArray t, double distanceThresh, InputOutputArray mask = noArray(),
OutputArray triangulatedPoints = noArray());
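A minimal usage sketch of the new overload (the function name poseFromMatches and the intrinsic values in K are illustrative placeholders, not part of this change; the matched points are assumed to come from a feature matcher):

#include <opencv2/calib3d.hpp>
#include <vector>

// Sketch: recover the relative pose between two views from matched 2D points.
void poseFromMatches(const std::vector<cv::Point2f>& pts1,
                     const std::vector<cv::Point2f>& pts2)
{
    // Shared camera matrix K (illustrative intrinsics).
    cv::Mat K = (cv::Mat_<double>(3, 3) << 700, 0, 320,
                                             0, 700, 240,
                                             0,   0,   1);
    cv::Mat E = cv::findEssentialMat(pts1, pts2, K, cv::RANSAC, 0.999, 1.0);
    cv::Mat R, t, mask, triangulated;
    // distanceThresh = 50 treats triangulated points farther than 50 units as outliers;
    // triangulated receives the 3D points of the selected pose hypothesis.
    int inliers = cv::recoverPose(E, pts1, pts2, K, R, t, 50.0, mask, triangulated);
    (void)inliers; // number of points that pass the cheirality check
}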
/** @brief For points in an image of a stereo pair, computes the corresponding epilines in the other image.
@param points Input points. \f$N \times 1\f$ or \f$1 \times N\f$ matrix of type CV_32FC2 or
@@ -458,8 +458,9 @@ cv::Mat cv::findEssentialMat( InputArray _points1, InputArray _points2, double f
return cv::findEssentialMat(_points1, _points2, cameraMatrix, method, prob, threshold, _mask);
}
int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, InputArray _cameraMatrix,
OutputArray _R, OutputArray _t, InputOutputArray _mask)
int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2,
InputArray _cameraMatrix, OutputArray _R, OutputArray _t, double distanceThresh,
InputOutputArray _mask, OutputArray triangulatedPoints)
{
CV_INSTRUMENT_REGION()
@@ -506,51 +507,60 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
// Notice here a threshold dist is used to filter
// out far away points (i.e. infinite points) since
// their depth may vary between positive and negative.
double dist = 50.0;
std::vector<Mat> allTriangulations(4);
Mat Q;
triangulatePoints(P0, P1, points1, points2, Q);
if(triangulatedPoints.needed())
Q.copyTo(allTriangulations[0]);
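// Cheirality: z*w > 0 (i.e. z/w > 0) means the triangulated point has positive depth in front of the first camera.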
Mat mask1 = Q.row(2).mul(Q.row(3)) > 0;
Q.row(0) /= Q.row(3);
Q.row(1) /= Q.row(3);
Q.row(2) /= Q.row(3);
Q.row(3) /= Q.row(3);
mask1 = (Q.row(2) < dist) & mask1;
mask1 = (Q.row(2) < distanceThresh) & mask1;
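// Transform the points into the second camera frame for this pose hypothesis and require positive, bounded depth there as well.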
Q = P1 * Q;
mask1 = (Q.row(2) > 0) & mask1;
mask1 = (Q.row(2) < dist) & mask1;
mask1 = (Q.row(2) < distanceThresh) & mask1;
triangulatePoints(P0, P2, points1, points2, Q);
if(triangulatedPoints.needed())
Q.copyTo(allTriangulations[1]);
Mat mask2 = Q.row(2).mul(Q.row(3)) > 0;
Q.row(0) /= Q.row(3);
Q.row(1) /= Q.row(3);
Q.row(2) /= Q.row(3);
Q.row(3) /= Q.row(3);
mask2 = (Q.row(2) < dist) & mask2;
mask2 = (Q.row(2) < distanceThresh) & mask2;
Q = P2 * Q;
mask2 = (Q.row(2) > 0) & mask2;
mask2 = (Q.row(2) < dist) & mask2;
mask2 = (Q.row(2) < distanceThresh) & mask2;
triangulatePoints(P0, P3, points1, points2, Q);
if(triangulatedPoints.needed())
Q.copyTo(allTriangulations[2]);
Mat mask3 = Q.row(2).mul(Q.row(3)) > 0;
Q.row(0) /= Q.row(3);
Q.row(1) /= Q.row(3);
Q.row(2) /= Q.row(3);
Q.row(3) /= Q.row(3);
mask3 = (Q.row(2) < dist) & mask3;
mask3 = (Q.row(2) < distanceThresh) & mask3;
Q = P3 * Q;
mask3 = (Q.row(2) > 0) & mask3;
mask3 = (Q.row(2) < dist) & mask3;
mask3 = (Q.row(2) < distanceThresh) & mask3;
triangulatePoints(P0, P4, points1, points2, Q);
if(triangulatedPoints.needed())
Q.copyTo(allTriangulations[3]);
Mat mask4 = Q.row(2).mul(Q.row(3)) > 0;
Q.row(0) /= Q.row(3);
Q.row(1) /= Q.row(3);
Q.row(2) /= Q.row(3);
Q.row(3) /= Q.row(3);
mask4 = (Q.row(2) < dist) & mask4;
mask4 = (Q.row(2) < distanceThresh) & mask4;
Q = P4 * Q;
mask4 = (Q.row(2) > 0) & mask4;
mask4 = (Q.row(2) < dist) & mask4;
mask4 = (Q.row(2) < distanceThresh) & mask4;
mask1 = mask1.t();
mask2 = mask2.t();
@@ -583,6 +593,7 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
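// Choose the (R, t) hypothesis with the most points passing the cheirality check.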
if (good1 >= good2 && good1 >= good3 && good1 >= good4)
{
if(triangulatedPoints.needed()) allTriangulations[0].copyTo(triangulatedPoints);
R1.copyTo(_R);
t.copyTo(_t);
if (_mask.needed()) mask1.copyTo(_mask);
@@ -590,6 +601,7 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
}
else if (good2 >= good1 && good2 >= good3 && good2 >= good4)
{
if(triangulatedPoints.needed()) allTriangulations[1].copyTo(triangulatedPoints);
R2.copyTo(_R);
t.copyTo(_t);
if (_mask.needed()) mask2.copyTo(_mask);
@@ -597,6 +609,7 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
}
else if (good3 >= good1 && good3 >= good2 && good3 >= good4)
{
if(triangulatedPoints.needed()) allTriangulations[2].copyTo(triangulatedPoints);
t = -t;
R1.copyTo(_R);
t.copyTo(_t);
@@ -605,6 +618,7 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
}
else
{
if(triangulatedPoints.needed()) allTriangulations[3].copyTo(triangulatedPoints);
t = -t;
R2.copyTo(_R);
t.copyTo(_t);
@@ -613,6 +627,12 @@ int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, Inp
}
}
int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, InputArray _cameraMatrix,
OutputArray _R, OutputArray _t, InputOutputArray _mask)
{
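// Delegate to the new overload; 50 reproduces the distance threshold that was previously hard-coded as dist = 50.0.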
return cv::recoverPose(E, _points1, _points2, _cameraMatrix, _R, _t, 50, _mask);
}
int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2, OutputArray _R,
OutputArray _t, double focal, Point2d pp, InputOutputArray _mask)
{