Commit 1bea5377, authored by Alexander Alekhin

Merge pull request #18165 from catree:fix_hand_eye_calibration_Andreff_NaN_3.4

@@ -1989,23 +1989,23 @@ CV_EXPORTS_W Mat getOptimalNewCameraMatrix( InputArray cameraMatrix, InputArray
@param[in] R_gripper2base Rotation part extracted from the homogeneous matrix that transforms a point
expressed in the gripper frame to the robot base frame (\f$_{}^{b}\textrm{T}_g\f$).
This is a vector (`vector<Mat>`) that contains the rotation matrices for all the transformations
from gripper frame to robot base frame.
This is a vector (`vector<Mat>`) that contains the rotation part, as `(3x3)` rotation matrices or `(3x1)` rotation vectors,
for all the transformations from gripper frame to robot base frame.
@param[in] t_gripper2base Translation part extracted from the homogeneous matrix that transforms a point
expressed in the gripper frame to the robot base frame (\f$_{}^{b}\textrm{T}_g\f$).
This is a vector (`vector<Mat>`) that contains the translation vectors for all the transformations
This is a vector (`vector<Mat>`) that contains the `(3x1)` translation vectors for all the transformations
from gripper frame to robot base frame.
@param[in] R_target2cam Rotation part extracted from the homogeneous matrix that transforms a point
expressed in the target frame to the camera frame (\f$_{}^{c}\textrm{T}_t\f$).
This is a vector (`vector<Mat>`) that contains the rotation matrices for all the transformations
from calibration target frame to camera frame.
This is a vector (`vector<Mat>`) that contains the rotation part, as `(3x3)` rotation matrices or `(3x1)` rotation vectors,
for all the transformations from calibration target frame to camera frame.
@param[in] t_target2cam Translation part extracted from the homogeneous matrix that transforms a point
expressed in the target frame to the camera frame (\f$_{}^{c}\textrm{T}_t\f$).
This is a vector (`vector<Mat>`) that contains the translation vectors for all the transformations
This is a vector (`vector<Mat>`) that contains the `(3x1)` translation vectors for all the transformations
from calibration target frame to camera frame.
@param[out] R_cam2gripper Estimated rotation part extracted from the homogeneous matrix that transforms a point
@param[out] R_cam2gripper Estimated `(3x3)` rotation part extracted from the homogeneous matrix that transforms a point
expressed in the camera frame to the gripper frame (\f$_{}^{g}\textrm{T}_c\f$).
@param[out] t_cam2gripper Estimated translation part extracted from the homogeneous matrix that transforms a point
@param[out] t_cam2gripper Estimated `(3x1)` translation part extracted from the homogeneous matrix that transforms a point
expressed in the camera frame to the gripper frame (\f$_{}^{g}\textrm{T}_c\f$).
@param[in] method One of the implemented Hand-Eye calibration methods, see cv::HandEyeCalibrationMethod
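For orientation, a minimal usage sketch of the interface documented above (not part of this patch; the wrapper function name and the way the poses are obtained are illustrative only):

// Hypothetical wrapper (not in OpenCV): shows the input/output shapes documented above.
// The robot and camera poses are assumed to have been collected elsewhere.
#include <opencv2/calib3d.hpp>
#include <vector>

void estimateCamToGripper(const std::vector<cv::Mat>& R_gripper2base,  // (3x3) matrices or (3x1) rvecs
                          const std::vector<cv::Mat>& t_gripper2base,  // (3x1) translation vectors
                          const std::vector<cv::Mat>& R_target2cam,    // (3x3) matrices or (3x1) rvecs
                          const std::vector<cv::Mat>& t_target2cam,    // (3x1) translation vectors
                          cv::Mat& R_cam2gripper,                      // output, (3x3)
                          cv::Mat& t_cam2gripper)                      // output, (3x1)
{
    cv::calibrateHandEye(R_gripper2base, t_gripper2base,
                         R_target2cam, t_target2cam,
                         R_cam2gripper, t_cam2gripper,
                         cv::CALIB_HAND_EYE_TSAI);
}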
@@ -29,6 +29,7 @@ static Mat homogeneousInverse(const Mat& T)
// q = sin(theta/2) * v
// theta - rotation angle
// v - unit rotation axis, |v| = 1
// Reference: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
static Mat rot2quatMinimal(const Mat& R)
{
CV_Assert(R.type() == CV_64FC1 && R.rows >= 3 && R.cols >= 3);
@@ -44,7 +45,7 @@ static Mat rot2quatMinimal(const Mat& R)
qx = (m21 - m12) / S;
qy = (m02 - m20) / S;
qz = (m10 - m01) / S;
} else if ((m00 > m11)&(m00 > m22)) {
} else if (m00 > m11 && m00 > m22) {
double S = sqrt(1.0 + m00 - m11 - m22) * 2; // S=4*qx
qx = 0.25 * S;
qy = (m01 + m10) / S;
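Context note (not part of the patch): the minimal 3-element form stores only the vector part of the unit quaternion, so the scalar part can be recovered from the unit-norm constraint,

    qw = cos(theta/2) = sqrt(1 - qx^2 - qy^2 - qz^2),   valid for theta in [0, pi].

The branching on the trace and on the largest diagonal element in rot2quatMinimal/rot2quat follows the referenced matrix-to-quaternion recipe: computing the largest quaternion component first keeps the divisor S well away from zero.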
@@ -98,6 +99,7 @@ static Mat quatMinimal2rot(const Mat& q)
//
// q - 4x1 unit quaternion <qw, qx, qy, qz>
// R - 3x3 rotation matrix
// Reference: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
static Mat rot2quat(const Mat& R)
{
CV_Assert(R.type() == CV_64FC1 && R.rows >= 3 && R.cols >= 3);
@@ -114,7 +116,7 @@ static Mat rot2quat(const Mat& R)
qx = (m21 - m12) / S;
qy = (m02 - m20) / S;
qz = (m10 - m01) / S;
} else if ((m00 > m11)&(m00 > m22)) {
} else if (m00 > m11 && m00 > m22) {
double S = sqrt(1.0 + m00 - m11 - m22) * 2; // S=4*qx
qw = (m21 - m12) / S;
qx = 0.25 * S;
@@ -572,7 +574,11 @@ static void calibrateHandEyeAndreff(const std::vector<Mat>& Hg, const std::vecto
R = R.reshape(1, 2, newSize);
//Eq 15
double det = determinant(R);
R = pow(sign_double(det) / abs(det), 1.0/3.0) * R;
if (std::fabs(det) < FLT_EPSILON)
{
CV_Error(Error::StsNoConv, "calibrateHandEye() with CALIB_HAND_EYE_ANDREFF method: determinant(R) is null");
}
R = cubeRoot(static_cast<float>(sign_double(det) / abs(det))) * R;
Mat w, u, vt;
SVDecomp(R, w, u, vt);
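This hunk is the core of the fix. Eq. 15 rescales R by (sign(det R) / |det R|)^(1/3); with std::pow, a negative base combined with a fractional exponent yields NaN, so a negative determinant silently corrupted the Andreff estimate. The patch switches to cv::cubeRoot, which is defined for negative inputs, and reports an error when the determinant is (near) zero instead of dividing by it. A standalone illustration (not part of the patch, values made up):

// Why the old expression produced NaN: std::pow(negative, 1.0/3.0) is NaN,
// while cv::cubeRoot handles negative arguments.
#include <opencv2/core.hpp>
#include <cmath>
#include <cstdio>

int main()
{
    double det = -8.0;                                        // hypothetical det(R) < 0
    double s = (det > 0 ? 1.0 : -1.0) / std::abs(det);        // sign(det)/|det| = -0.125
    double scale_old = std::pow(s, 1.0 / 3.0);                // NaN: negative base, fractional exponent
    float  scale_new = cv::cubeRoot(static_cast<float>(s));   // -0.5, well defined
    std::printf("pow: %f  cubeRoot: %f\n", scale_old, scale_new);
    return 0;
}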
@@ -7,6 +7,38 @@
namespace opencv_test { namespace {
static std::string getMethodName(HandEyeCalibrationMethod method)
{
    std::string method_name = "";
    switch (method)
    {
    case CALIB_HAND_EYE_TSAI:
        method_name = "Tsai";
        break;

    case CALIB_HAND_EYE_PARK:
        method_name = "Park";
        break;

    case CALIB_HAND_EYE_HORAUD:
        method_name = "Horaud";
        break;

    case CALIB_HAND_EYE_ANDREFF:
        method_name = "Andreff";
        break;

    case CALIB_HAND_EYE_DANIILIDIS:
        method_name = "Daniilidis";
        break;

    default:
        break;
    }

    return method_name;
}

class CV_CalibrateHandEyeTest : public cvtest::BaseTest
{
public:
@@ -48,7 +80,6 @@ protected:
std::vector<Mat> &R_target2cam, std::vector<Mat> &t_target2cam,
bool noise, Mat& R_cam2gripper, Mat& t_cam2gripper);
Mat homogeneousInverse(const Mat& T);
std::string getMethodName(HandEyeCalibrationMethod method);
double sign_double(double val);
double eps_rvec[5];
@@ -340,45 +371,95 @@ Mat CV_CalibrateHandEyeTest::homogeneousInverse(const Mat& T)
    return Tinv;
}

std::string CV_CalibrateHandEyeTest::getMethodName(HandEyeCalibrationMethod method)
{
    std::string method_name = "";
    switch (method)
    {
    case CALIB_HAND_EYE_TSAI:
        method_name = "Tsai";
        break;

    case CALIB_HAND_EYE_PARK:
        method_name = "Park";
        break;

    case CALIB_HAND_EYE_HORAUD:
        method_name = "Horaud";
        break;

    case CALIB_HAND_EYE_ANDREFF:
        method_name = "Andreff";
        break;

    case CALIB_HAND_EYE_DANIILIDIS:
        method_name = "Daniilidis";
        break;

    default:
        break;
    }

    return method_name;
}

double CV_CalibrateHandEyeTest::sign_double(double val)
{
    return (0 < val) - (val < 0);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

TEST(Calib3d_CalibrateHandEye, regression) { CV_CalibrateHandEyeTest test; test.safe_run(); }

TEST(Calib3d_CalibrateHandEye, regression_17986)
{
    const std::string camera_poses_filename = findDataFile("cv/hand_eye_calibration/cali.txt");
    const std::string end_effector_poses = findDataFile("cv/hand_eye_calibration/robot_cali.txt");

    std::vector<Mat> R_target2cam;
    std::vector<Mat> t_target2cam;
    // Parse camera poses
    {
        std::ifstream file(camera_poses_filename.c_str());
        ASSERT_TRUE(file.is_open());

        int ndata = 0;
        file >> ndata;
        R_target2cam.reserve(ndata);
        t_target2cam.reserve(ndata);

        std::string image_name;
        Matx33d cameraMatrix;
        Matx33d R;
        Matx31d t;
        Matx16d distCoeffs;
        Matx13d distCoeffs2;
        while (file >> image_name >>
               cameraMatrix(0,0) >> cameraMatrix(0,1) >> cameraMatrix(0,2) >>
               cameraMatrix(1,0) >> cameraMatrix(1,1) >> cameraMatrix(1,2) >>
               cameraMatrix(2,0) >> cameraMatrix(2,1) >> cameraMatrix(2,2) >>
               R(0,0) >> R(0,1) >> R(0,2) >>
               R(1,0) >> R(1,1) >> R(1,2) >>
               R(2,0) >> R(2,1) >> R(2,2) >>
               t(0) >> t(1) >> t(2) >>
               distCoeffs(0) >> distCoeffs(1) >> distCoeffs(2) >> distCoeffs(3) >> distCoeffs(4) >>
               distCoeffs2(0) >> distCoeffs2(1) >> distCoeffs2(2)) {
            R_target2cam.push_back(Mat(R));
            t_target2cam.push_back(Mat(t));
        }
    }

    std::vector<Mat> R_gripper2base;
    std::vector<Mat> t_gripper2base;
    // Parse end-effector poses
    {
        std::ifstream file(end_effector_poses.c_str());
        ASSERT_TRUE(file.is_open());

        int ndata = 0;
        file >> ndata;
        R_gripper2base.reserve(ndata);
        t_gripper2base.reserve(ndata);

        Matx33d R;
        Matx31d t;
        Matx14d last_row;
        while (file >>
               R(0,0) >> R(0,1) >> R(0,2) >> t(0) >>
               R(1,0) >> R(1,1) >> R(1,2) >> t(1) >>
               R(2,0) >> R(2,1) >> R(2,2) >> t(2) >>
               last_row(0) >> last_row(1) >> last_row(2) >> last_row(3)) {
            R_gripper2base.push_back(Mat(R));
            t_gripper2base.push_back(Mat(t));
        }
    }

    std::vector<HandEyeCalibrationMethod> methods;
    methods.push_back(CALIB_HAND_EYE_TSAI);
    methods.push_back(CALIB_HAND_EYE_PARK);
    methods.push_back(CALIB_HAND_EYE_HORAUD);
    methods.push_back(CALIB_HAND_EYE_ANDREFF);
    methods.push_back(CALIB_HAND_EYE_DANIILIDIS);

    for (size_t idx = 0; idx < methods.size(); idx++) {
        SCOPED_TRACE(cv::format("method=%s", getMethodName(methods[idx]).c_str()));

        Matx33d R_cam2gripper_est;
        Matx31d t_cam2gripper_est;
        calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, R_cam2gripper_est, t_cam2gripper_est, methods[idx]);

        EXPECT_TRUE(checkRange(R_cam2gripper_est));
        EXPECT_TRUE(checkRange(t_cam2gripper_est));
    }
}

}} // namespace
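Worth noting about the pass criterion: the new regression test only asserts that the estimated rotation and translation are free of NaN/Inf values, which is exactly the failure mode this patch addresses. A standalone illustration of what cv::checkRange rejects (not part of the patch):

// cv::checkRange (with default arguments) returns false if any element is NaN or Inf,
// so EXPECT_TRUE(checkRange(...)) fails on the kind of output fixed by this PR.
#include <opencv2/core.hpp>
#include <limits>

int main()
{
    cv::Matx33d good = cv::Matx33d::eye();
    cv::Matx33d bad = good;
    bad(0, 0) = std::numeric_limits<double>::quiet_NaN();

    CV_Assert(cv::checkRange(good));   // finite values pass
    CV_Assert(!cv::checkRange(bad));   // NaN is rejected
    return 0;
}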