(2) Hand-eye calibration of a surface scanner and a robot manipulator (eye-to-hand: nine-point calibration)
2022-07-26 09:33:00 【Flying123_123】
1、 Configure the OpenCV and Eigen libraries
OpenCV is needed for its matrix operations.
Eigen is needed for its SVD decomposition.
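A minimal CMakeLists.txt sketch for pulling both libraries into the project is shown below; the project name, target name, and source file name are assumptions and will differ per setup.

cmake_minimum_required(VERSION 3.10)
project(hand_eye_calibration)               # assumed project name
find_package(OpenCV REQUIRED)               # cv::Mat, cv::SVD, cv2eigen/eigen2cv
find_package(Eigen3 REQUIRED)               # Eigen::JacobiSVD
add_executable(calibrate main.cpp)          # main.cpp holds the solver from section 3
target_link_libraries(calibrate ${OpenCV_LIBS} Eigen3::Eigen)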
2、 Collect the points
The coordinates of the points relative to the calibration-board coordinate system:
30 30 0
90 30 0
180 30 0
30 120 0
90 120 0
180 120 0
30 210 0
90 210 0
180 210 0
Save these as board.txt.
The coordinates of the same points relative to the camera coordinate system:
-24.6 115.6 869.1
-85.7 115.6 869.1
-171.3 119.7 869.1
-24.5 21.9 872.1
-85.9 21.9 872.1
-175.5 26.0 872.1
-28.6 -68.1 874.0
-90.0 -64.0 874.0
-176.0 -64.0 874.0
Save these as camera.txt.
You can also make a copy named camera1.txt; it is used for the test at the end.
The coordinates of the same points relative to the robot base coordinate system:
465.09 -109.00 70.64
405.09 -106.00 71.64
314.09 -101.00 72.64
473.09 -31.00 70.64
413.09 -30.00 71.64
321.09 -28.00 72.64
483.09 50.00 70.64
424.09 50.00 71.64
337.09 48.00 72.64
Save these as robot.txt.
3、 Solver
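Before the code, a quick summary of the math it implements: the rigid transform between two point sets is estimated with the standard centroid-plus-SVD method (the Kabsch algorithm). Writing the board points as P_A^i and the target points (camera frame or robot base frame) as P_B^i:

\bar{P}_A = \frac{1}{n}\sum_{i=1}^{n} P_A^i, \qquad \bar{P}_B = \frac{1}{n}\sum_{i=1}^{n} P_B^i

H = \sum_{i=1}^{n} (P_A^i - \bar{P}_A)(P_B^i - \bar{P}_B)^{T} = U D V^{T}

R = V U^{T}, \qquad t = \bar{P}_B - R\,\bar{P}_A

Solving this once against robot.txt and once against camera.txt gives H_board2robot and H_board2camera; the desired result is then H_camera2robot = H_board2robot * H_board2camera^{-1}, which is exactly what main() assembles.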
#include <vector>
#include <opencv2/opencv.hpp>
#include <iostream>
#include <math.h>
#include <map>
#include <fstream>
#include <sstream>
#include <Eigen/SVD>
#include <Eigen/Core>
#include<opencv2/core/eigen.hpp>
using namespace std;
using namespace cv;
using namespace Eigen;
void Solve_board2target(string target, Mat& T_board2camera, Mat& R_board2camera);
void test_camera2robot(cv::Mat H_camera2robot);
class coordinate {
public:
double x;
double y;
double z;
};
int main()
{
cv::Mat tempR, tempT;
cv::Mat R_board2camera, T_board2camera;
cv::Mat R_board2robot, T_board2robot;
cv::Mat H_board2camera = Mat::eye(4, 4, CV_64FC1);
cv::Mat H_board2robot = Mat::eye(4, 4, CV_64FC1);
string target;
// Solve for the transform from the calibration board to the robot base
target = "robot";
Solve_board2target(target, T_board2robot, R_board2robot);
tempT = T_board2robot.clone();
tempR = R_board2robot.clone();
tempR({ 0,0,3,3 }).copyTo(H_board2robot({ 0,0,3,3 })); // rotation into the top-left 3x3 block
tempT({ 0,0,1,3 }).copyTo(H_board2robot({ 3,0,1,3 })); // translation into the fourth column
cout << "board2robot:" << endl;
cout << H_board2robot << endl;
cout << "______________________________________" << endl;
// Find the conversion matrix from the board to the camera
target = "camera";
Solve_board2target(target, T_board2camera, R_board2camera);
tempT = T_board2camera.clone();
tempR = R_board2camera.clone();
tempR({ 0,0,3,3 }).copyTo(H_board2camera({ 0,0,3,3 })); // rotation into the top-left 3x3 block
tempT({ 0,0,1,3 }).copyTo(H_board2camera({ 3,0,1,3 })); // translation into the fourth column
cout << "board2camera:" << endl;
cout << H_board2camera << endl;
// Invert to get the transform from the camera to the calibration board
cv::Mat H_camera2board = H_board2camera.inv();
cout << "H_camera2board:" << endl;
cout << H_camera2board << endl;
cout << "______________________________________" << endl;
cv::Mat H_camera2robot = H_board2robot*H_camera2board;
cout << "camera2robot:" << endl;
cout << H_camera2robot << endl;
cout << "______________________________________" << endl;
test_camera2robot(H_camera2robot);
system("pause");
return 0;
}
// Solve for the rigid transform from the calibration board to the target frame (camera or robot)
//**************************************************************************
void Solve_board2target(string target, Mat& T_board2camera, Mat& R_board2camera)
{
// Estimate the rigid transform A -> B between the two point sets
coordinate P_board, P_target, P_centroid;
vector<coordinate> v_board, v_target;
cv::Mat PA(3, 1, CV_64FC1);
cv::Mat PB(3, 1, CV_64FC1);
cv::Mat centroidA(3, 1, CV_64FC1);
cv::Mat centroidB(3, 1, CV_64FC1);
cv::Mat tempD, tempU, tempV;
cv::Mat H = Mat::zeros(3, 3, CV_64FC1);
cv::Mat tempH = Mat::eye(4, 4, CV_64FC1);
// Read the point data and compute the centroids
//------------------------------------------
P_centroid.x = 0;
P_centroid.y = 0;
P_centroid.z = 0;
// Read the coordinates of the points on the board and calculate the centroid
ifstream fin;
fin.open("..//board.txt", ios::in);
// read until extraction fails, so a trailing newline does not add a duplicate point
while (fin >> P_board.x >> P_board.y >> P_board.z)
{
///cout << P_board.x << " " << P_board.y << " " << P_board.z << endl;
v_board.push_back(P_board); // store the point
P_centroid.x = P_centroid.x + P_board.x;
P_centroid.y = P_centroid.y + P_board.y;
P_centroid.z = P_centroid.z + P_board.z;
}
fin.close();
// Centroid of the board points
centroidA.at<double>(0, 0) = P_centroid.x / v_board.size();
centroidA.at<double>(1, 0) = P_centroid.y / v_board.size();
centroidA.at<double>(2, 0) = P_centroid.z / v_board.size();
///cout << "centroidA:" << endl;
///cout << centroidA << endl;
// Read the target points (camera frame or robot frame) and compute their centroid
P_centroid.x = 0;
P_centroid.y = 0;
P_centroid.z = 0;
if (target == "camera")
{
fin.open("..//camera.txt", ios::in);
}
else if (target == "robot")
{
fin.open("..//robot.txt", ios::in);
}
else
{
cout << "targe Incorrect input !!" << endl;
return;
}
while (fin >> P_target.x >> P_target.y >> P_target.z) // read until extraction fails
{
///cout << P_target.x << " " << P_target.y << " " << P_target.z << endl;
v_target.push_back(P_target); // store the point
P_centroid.x = P_centroid.x + P_target.x;
P_centroid.y = P_centroid.y + P_target.y;
P_centroid.z = P_centroid.z + P_target.z;
}
fin.close();
// Centroid of the target points
centroidB.at<double>(0, 0) = P_centroid.x / v_target.size();
centroidB.at<double>(1, 0) = P_centroid.y / v_target.size();
centroidB.at<double>(2, 0) = P_centroid.z / v_target.size();
// Accumulate the cross-covariance matrix H
//--------------------------------------------------------------
for (int i = 0; i < v_board.size(); i++)
{
PA.at<double>(0, 0) = v_board[i].x;
PA.at<double>(1, 0) = v_board[i].y;
PA.at<double>(2, 0) = v_board[i].z;
PB.at<double>(0, 0) = v_target[i].x;
PB.at<double>(1, 0) = v_target[i].y;
PB.at<double>(2, 0) = v_target[i].z;
H = H + (PA - centroidA)*((PB - centroidB).t());
}
///cout << "H:" << endl;
///cout << H << endl;
Eigen::MatrixXd SVD_H(3, 3);
cv2eigen(H, SVD_H); // convert the cv::Mat to an Eigen matrix
// SVD-decompose H, then recover the rotation and translation matrices
//------------------------------------------------------------------
///cout << "SVD_H:" << endl;
///cout << SVD_H << endl;
SVD::compute(H, tempD, tempU, tempV, 0); // OpenCV's SVD (these results are overwritten by the Eigen version below)
//-------------------------------------------------------------------
// perform the SVD with Eigen
Eigen::JacobiSVD<Eigen::MatrixXd> svd_holder(SVD_H,
Eigen::ComputeThinU |
Eigen::ComputeThinV);
// extract the SVD factors
Eigen::MatrixXd U = svd_holder.matrixU();
Eigen::MatrixXd V = svd_holder.matrixV();
Eigen::MatrixXd D = svd_holder.singularValues();
//--------------------------------------------------------------------
// convert the Eigen matrices back to cv::Mat
eigen2cv(U, tempU);
eigen2cv(V, tempV);
eigen2cv(D, tempD);
// Rotation and translation from the calibration board to the target frame
R_board2camera = tempV * (tempU.t());
T_board2camera = centroidB - (R_board2camera * centroidA);
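// Optional safeguard, not in the original post: with noisy or nearly planar data the
// SVD solution can come back as a reflection (det(R) = -1). The standard fix is to
// flip the sign of the last column of V and recompute R and T.
if (cv::determinant(R_board2camera) < 0)
{
cv::Mat S = Mat::eye(3, 3, CV_64FC1);
S.at<double>(2, 2) = -1.0;
R_board2camera = tempV * S * (tempU.t());
T_board2camera = centroidB - (R_board2camera * centroidA);
}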
}
// test camera2robot
void test_camera2robot(cv::Mat H_camera2robot)
{
coordinate P_camera;
vector<coordinate> v_camera;
cv::Mat PTemp (4, 1, CV_64FC1);
cv::Mat PCamera(4, 1, CV_64FC1);
cv::Mat PRobot (4, 1, CV_64FC1);
ifstream fin;
fin.open("..//camera1.txt", ios::in);
while (fin >> P_camera.x >> P_camera.y >> P_camera.z) // read until extraction fails
{
cout << P_camera.x << " " << P_camera.y << " " << P_camera.z << endl;
v_camera.push_back(P_camera); // store the point
}
fin.close();
double tempAA[1][3];
for (int i = 0; i < v_camera.size(); i++)
{
cout << i << endl;
PTemp.at<double>(0, 0) = v_camera[i].x;
PTemp.at<double>(1, 0) = v_camera[i].y;
PTemp.at<double>(2, 0) = v_camera[i].z;
PTemp.at<double>(3, 0) = 1;
PCamera = PTemp.clone();
///cout << "PCamera:" << endl;
///cout << PCamera << endl;
///cout << "H_camera2robot" << endl;
///cout << H_camera2robot << endl;
//cout << H_camera2robot << endl;
PRobot = H_camera2robot * PCamera;
///cout << "PRobot:" << endl;
///cout << PRobot << endl;
tempAA[0][0] = PRobot.at<double>(0, 0);
tempAA[0][1] = PRobot.at<double>(1, 0);
tempAA[0][2] = PRobot.at<double>(2, 0);
cout << tempAA[0][0] << " " << tempAA[0][1] << " " << tempAA[0][2] << endl;
}
}
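For a quantitative check rather than eyeballing the printed coordinates, the transformed camera points can be compared against the measured robot points. The standalone sketch below is not part of the original program; the file names follow section 2, and the identity matrix is only a placeholder for the H_camera2robot printed by the solver.

// rms_check.cpp -- accuracy check for the calibration result (sketch)
#include <cmath>
#include <fstream>
#include <iostream>
#include <opencv2/opencv.hpp>
int main()
{
// Replace this placeholder with the H_camera2robot printed by the solver.
cv::Mat H_camera2robot = cv::Mat::eye(4, 4, CV_64FC1);
std::ifstream fin_cam("..//camera.txt");
std::ifstream fin_rob("..//robot.txt");
double cx, cy, cz, rx, ry, rz, sum_sq = 0.0;
int n = 0;
while ((fin_cam >> cx >> cy >> cz) && (fin_rob >> rx >> ry >> rz))
{
cv::Mat p = (cv::Mat_<double>(4, 1) << cx, cy, cz, 1.0); // homogeneous camera point
cv::Mat q = H_camera2robot * p; // predicted robot-frame point
double dx = q.at<double>(0, 0) - rx;
double dy = q.at<double>(1, 0) - ry;
double dz = q.at<double>(2, 0) - rz;
sum_sq += dx * dx + dy * dy + dz * dz;
n++;
}
if (n > 0)
std::cout << "RMS error over " << n << " point pairs: " << std::sqrt(sum_sq / n) << std::endl;
return 0;
}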