This program can perform camera calibration with several kinds of calibration boards. It is driven by two XML files: an image list (shown first) that holds the paths of the captured calibration-board images, and a settings file that configures the calibration itself.
<?xml version="1.0"?>
<!-- Paths of the calibration-board images captured by the camera -->
<opencv_storage>
<images>
images/CameraCalibration/VID5/xx1.jpg
images/CameraCalibration/VID5/xx2.jpg
images/CameraCalibration/VID5/xx3.jpg
images/CameraCalibration/VID5/xx4.jpg
images/CameraCalibration/VID5/xx5.jpg
images/CameraCalibration/VID5/xx6.jpg
images/CameraCalibration/VID5/xx7.jpg
images/CameraCalibration/VID5/xx8.jpg
</images>
</opencv_storage>
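For reference, a list file like this can be read back with cv::FileStorage; the short sketch below mirrors what Settings::readStringList() in the calibration code does (the list path is the example one above).

#include <opencv2/core.hpp>
#include <iostream>
#include <string>
#include <vector>

// Minimal sketch: load the image-list XML above into a vector of paths.
int main()
{
    std::vector<std::string> imageList;
    cv::FileStorage fs("images/CameraCalibration/VID5/VID5.xml", cv::FileStorage::READ);
    if (!fs.isOpened())
        return -1;
    cv::FileNode n = fs.getFirstTopLevelNode();   // the <images> sequence
    if (n.type() != cv::FileNode::SEQ)
        return -1;
    for (cv::FileNodeIterator it = n.begin(); it != n.end(); ++it)
        imageList.push_back((std::string)*it);
    std::cout << "loaded " << imageList.size() << " image paths" << std::endl;
    return 0;
}

The second configuration file is the settings file itself: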
<?xml version="1.0"?> <opencv_storage> <Settings> <!-- 标定板尺寸. (可以是正方形、圆形) --> <BoardSize_Width>9</BoardSize_Width> <BoardSize_Height>6</BoardSize_Height> <!-- 用户定义的方格的尺寸 (像素,毫米)--> <Square_Size>50</Square_Size> <Marker_Size>25</Marker_Size> <!-- 相机标定所使用的标定板类型. 可以是CHESSBOARD CHARUCOBOARD CIRCLES_GRID ASYMMETRIC_CIRCLES_GRID --> <Calibrate_Pattern>"CHESSBOARD"</Calibrate_Pattern> <ArUco_Dict_Name>DICT_4X4_50</ArUco_Dict_Name> <ArUco_Dict_File_Name></ArUco_Dict_File_Name> <!-- 用于标定的输入来源。 使用输入摄像头 -> 提供摄像头的ID,例如 "1" 使用输入视频 -> 提供输入视频的路径,例如 "/tmp/x.avi" 使用图像列表 -> 提供含有图像列表的XML或YAML文件的路径,例如 "/tmp/circles_list.xml" --> <Input>"images/CameraCalibration/VID5/VID5.xml"</Input> <!-- 如果为真(非零),则沿水平轴翻转输入图像 --> <Input_FlipAroundHorizontalAxis>0</Input_FlipAroundHorizontalAxis> <!-- 摄像头的帧之间的时间延迟 --> <Input_Delay>100</Input_Delay> <!-- 用于标定的帧数量 --> <Calibrate_NrOfFrameToUse>25</Calibrate_NrOfFrameToUse> <!-- 只考虑fy作为自由参数,比率fx/fy与输入cameraMatrix中的相同 --> <Calibrate_FixAspectRatio> 1 </Calibrate_FixAspectRatio> <!-- 如果为真(非零),切向畸变系数将被设置为零并保持为零 --> <Calibrate_AssumeZeroTangentialDistortion>1</Calibrate_AssumeZeroTangentialDistortion> <!-- 如果为真(非零),在全局优化过程中主点不会改变 --> <Calibrate_FixPrincipalPointAtTheCenter> 1 </Calibrate_FixPrincipalPointAtTheCenter> <!-- 输出日志文件名 --> <Write_outputFileName>"out_camera_data.xml"</Write_outputFileName> <!-- 如果为真(非零),将检测到的特征点写入输出文件 --> <Write_DetectedFeaturePoints>1</Write_DetectedFeaturePoints> <!-- 如果为真(非零),我们将外部相机参数写入输出文件 --> <Write_extrinsicParameters>1</Write_extrinsicParameters> <!-- 如果为真(非零),我们将优化后的3D目标网格点写入输出文件 --> <Write_gridPoints>1</Write_gridPoints> <!-- 如果为真(非零),校准后我们显示无畸变的图像 --> <Show_UndistortedImage>1</Show_UndistortedImage> <!-- 如果为真(非零),将使用鱼眼相机模型进行标定 --> <Calibrate_UseFisheyeModel>0</Calibrate_UseFisheyeModel> <!-- 如果为真(非零),畸变系数k1将等于零 --> <Fix_K1>0</Fix_K1> <!-- 如果为真(非零),畸变系数k2将等于零 --> <Fix_K2>0</Fix_K2> <!-- 如果为真(非零),畸变系数k3将等于零 --> <Fix_K3>0</Fix_K3> <!-- 如果为真(非零),畸变系数k4将等于零 --> <Fix_K4>1</Fix_K4> <!-- 如果为真(非零),畸变系数k5将等于零 --> <Fix_K5>1</Fix_K5> </Settings> </opencv_storage>
The core code is camera_calibration.cpp, which computes the camera intrinsics and distortion parameters from multiple images of the calibration board. Having read through it, the logic is quite clear, so I will not add many extra annotations.
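Before the full listing, a heavily stripped-down sketch of the same idea may help: detect chessboard corners in each image, build the matching 3D board points, then call calibrateCamera. This is only an illustration under the settings above (board 9x6, square size 50), not the sample itself; the image paths are placeholders.

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    const cv::Size boardSize(9, 6);    // inner corners per row and column
    const float squareSize = 50.f;     // same unit as Square_Size in the settings

    // Placeholder list; in the sample these come from VID5.xml.
    std::vector<std::string> files = { "images/CameraCalibration/VID5/xx1.jpg" /* ... */ };

    std::vector<std::vector<cv::Point2f>> imagePoints;
    cv::Size imageSize;
    for (const auto& f : files)
    {
        cv::Mat img = cv::imread(f), gray;
        if (img.empty()) continue;
        imageSize = img.size();
        std::vector<cv::Point2f> corners;
        if (!cv::findChessboardCorners(img, boardSize, corners)) continue;
        cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
        cv::cornerSubPix(gray, corners, cv::Size(11, 11), cv::Size(-1, -1),
                         cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.0001));
        imagePoints.push_back(corners);
    }
    if (imagePoints.empty())
        return -1;

    // One set of 3D board points on the z = 0 plane, replicated for every view.
    std::vector<cv::Point3f> board;
    for (int i = 0; i < boardSize.height; ++i)
        for (int j = 0; j < boardSize.width; ++j)
            board.emplace_back(j * squareSize, i * squareSize, 0.f);
    std::vector<std::vector<cv::Point3f>> objectPoints(imagePoints.size(), board);

    cv::Mat cameraMatrix, distCoeffs;
    std::vector<cv::Mat> rvecs, tvecs;
    double rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize,
                                     cameraMatrix, distCoeffs, rvecs, tvecs);
    std::cout << "RMS reprojection error: " << rms << "\n" << cameraMatrix << std::endl;
    return 0;
}

With that in mind, here is the full sample: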
#include <iostream>
#include <sstream>
#include <string>
#include <ctime>
#include <cstdio>

#include <opencv2/core.hpp>
#include <opencv2/core/utility.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include "opencv2/objdetect/charuco_detector.hpp"

using namespace cv;
using namespace std;

class Settings
{
public:
    Settings() : goodInput(false) {}
    enum Pattern { NOT_EXISTING, CHESSBOARD, CHARUCOBOARD, CIRCLES_GRID, ASYMMETRIC_CIRCLES_GRID };
    enum InputType { INVALID, CAMERA, VIDEO_FILE, IMAGE_LIST };

    void write(FileStorage& fs) const          // Write the settings to a file
    {
        fs << "{"
           << "BoardSize_Width"  << boardSize.width
           << "BoardSize_Height" << boardSize.height
           << "Square_Size"      << squareSize
           << "Marker_Size"      << markerSize
           << "Calibrate_Pattern" << patternToUse
           << "ArUco_Dict_Name" << arucoDictName
           << "ArUco_Dict_File_Name" << arucoDictFileName
           << "Calibrate_NrOfFrameToUse" << nrFrames
           << "Calibrate_FixAspectRatio" << aspectRatio
           << "Calibrate_AssumeZeroTangentialDistortion" << calibZeroTangentDist
           << "Calibrate_FixPrincipalPointAtTheCenter" << calibFixPrincipalPoint

           << "Write_DetectedFeaturePoints" << writePoints
           << "Write_extrinsicParameters"   << writeExtrinsics
           << "Write_gridPoints" << writeGrid
           << "Write_outputFileName"  << outputFileName

           << "Show_UndistortedImage" << showUndistorted

           << "Input_FlipAroundHorizontalAxis" << flipVertical
           << "Input_Delay" << delay
           << "Input" << input
           << "}";
    }
    void read(const FileNode& node)            // Read the settings from a file node
    {
        node["BoardSize_Width" ] >> boardSize.width;
        node["BoardSize_Height"] >> boardSize.height;
        node["Calibrate_Pattern"] >> patternToUse;
        node["ArUco_Dict_Name"] >> arucoDictName;
        node["ArUco_Dict_File_Name"] >> arucoDictFileName;
        node["Square_Size"]  >> squareSize;
        node["Marker_Size"]  >> markerSize;
        node["Calibrate_NrOfFrameToUse"] >> nrFrames;
        node["Calibrate_FixAspectRatio"] >> aspectRatio;
        node["Write_DetectedFeaturePoints"] >> writePoints;
        node["Write_extrinsicParameters"] >> writeExtrinsics;
        node["Write_gridPoints"] >> writeGrid;
        node["Write_outputFileName"] >> outputFileName;
        node["Calibrate_AssumeZeroTangentialDistortion"] >> calibZeroTangentDist;
        node["Calibrate_FixPrincipalPointAtTheCenter"] >> calibFixPrincipalPoint;
        node["Calibrate_UseFisheyeModel"] >> useFisheye;
        node["Input_FlipAroundHorizontalAxis"] >> flipVertical;
        node["Show_UndistortedImage"] >> showUndistorted;
        node["Input"] >> input;
        node["Input_Delay"] >> delay;
        node["Fix_K1"] >> fixK1;
        node["Fix_K2"] >> fixK2;
        node["Fix_K3"] >> fixK3;
        node["Fix_K4"] >> fixK4;
        node["Fix_K5"] >> fixK5;

        validate();
    }
    // Validate the input values
    void validate()
    {
        goodInput = true;
        if (boardSize.width <= 0 || boardSize.height <= 0)
        {
            cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << endl;
            goodInput = false;
        }
        if (squareSize <= 10e-6)
        {
            cerr << "Invalid square size " << squareSize << endl;
            goodInput = false;
        }
        if (nrFrames <= 0)
        {
            cerr << "Invalid number of frames " << nrFrames << endl;
            goodInput = false;
        }

        if (input.empty())      // Check for valid input
            inputType = INVALID;
        else
        {
            if (input[0] >= '0' && input[0] <= '9')
            {
                stringstream ss(input);
                ss >> cameraID;
                inputType = CAMERA;
            }
            else
            {
                if (isListOfImages(input) && readStringList(input, imageList))
                {
                    inputType = IMAGE_LIST;
                    nrFrames = (nrFrames < (int)imageList.size()) ? nrFrames : (int)imageList.size();
                }
                else
                    inputType = VIDEO_FILE;
            }
            if (inputType == CAMERA)
                inputCapture.open(cameraID);
            if (inputType == VIDEO_FILE)
                inputCapture.open(input);
            if (inputType != IMAGE_LIST && !inputCapture.isOpened())
                inputType = INVALID;
        }
        if (inputType == INVALID)
        {
            cerr << " Input does not exist: " << input;
            goodInput = false;
        }

        flag = 0;
        if(calibFixPrincipalPoint) flag |= CALIB_FIX_PRINCIPAL_POINT;
        if(calibZeroTangentDist)   flag |= CALIB_ZERO_TANGENT_DIST;
        if(aspectRatio)            flag |= CALIB_FIX_ASPECT_RATIO;
        if(fixK1)                  flag |= CALIB_FIX_K1;
        if(fixK2)                  flag |= CALIB_FIX_K2;
        if(fixK3)                  flag |= CALIB_FIX_K3;
        if(fixK4)                  flag |= CALIB_FIX_K4;
        if(fixK5)                  flag |= CALIB_FIX_K5;

        if (useFisheye) {
            // the fisheye model has its own enum, so overwrite the flags
            flag = fisheye::CALIB_FIX_SKEW | fisheye::CALIB_RECOMPUTE_EXTRINSIC;
            if(fixK1)                   flag |= fisheye::CALIB_FIX_K1;
            if(fixK2)                   flag |= fisheye::CALIB_FIX_K2;
            if(fixK3)                   flag |= fisheye::CALIB_FIX_K3;
            if(fixK4)                   flag |= fisheye::CALIB_FIX_K4;
            if (calibFixPrincipalPoint) flag |= fisheye::CALIB_FIX_PRINCIPAL_POINT;
        }

        calibrationPattern = NOT_EXISTING;
        if (!patternToUse.compare("CHESSBOARD"))              calibrationPattern = CHESSBOARD;
        if (!patternToUse.compare("CHARUCOBOARD"))            calibrationPattern = CHARUCOBOARD;
        if (!patternToUse.compare("CIRCLES_GRID"))            calibrationPattern = CIRCLES_GRID;
        if (!patternToUse.compare("ASYMMETRIC_CIRCLES_GRID")) calibrationPattern = ASYMMETRIC_CIRCLES_GRID;
        if (calibrationPattern == NOT_EXISTING)
        {
            cerr << " Camera calibration mode does not exist: " << patternToUse << endl;
            goodInput = false;
        }
        atImageList = 0;
    }
    // Grab the next image
    Mat nextImage()
    {
        Mat result;
        if( inputCapture.isOpened() )
        {
            Mat view0;
            inputCapture >> view0;
            view0.copyTo(result);
        }
        else if( atImageList < imageList.size() )
            result = imread(imageList[atImageList++], IMREAD_COLOR);

        return result;
    }

    // Read the image file names and store them in a vector
    static bool readStringList( const string& filename, vector<string>& l )
    {
        l.clear();
        FileStorage fs(filename, FileStorage::READ);
        if( !fs.isOpened() )
            return false;
        FileNode n = fs.getFirstTopLevelNode();
        if( n.type() != FileNode::SEQ )
            return false;
        FileNodeIterator it = n.begin(), it_end = n.end();
        for( ; it != it_end; ++it )
            l.push_back((string)*it);
        return true;
    }

    static bool isListOfImages( const string& filename)
    {
        string s(filename);
        // Look for file extension
        if( s.find(".xml") == string::npos && s.find(".yaml") == string::npos &&
            s.find(".yml") == string::npos )
            return false;
        else
            return true;
    }
public:
    Size boardSize;              // The size of the board -> Number of items by width and height
    Pattern calibrationPattern;  // One of the Chessboard, ChArUco board, circles, or asymmetric circle pattern
    float squareSize;            // The size of a square in your defined unit (point, millimeter, etc).
    float markerSize;            // The size of a marker in your defined unit (point, millimeter, etc).
    string arucoDictName;        // The name of the ArUco dictionary used for the ChArUco pattern
    string arucoDictFileName;    // The name of the file which contains the ArUco dictionary for the ChArUco pattern
    int nrFrames;                // The number of frames to use from the input for calibration
    float aspectRatio;           // The aspect ratio
    int delay;                   // In case of a video input
    bool writePoints;            // Write detected feature points
    bool writeExtrinsics;        // Write extrinsic parameters
    bool writeGrid;              // Write refined 3D target grid points
    bool calibZeroTangentDist;   // Assume zero tangential distortion
    bool calibFixPrincipalPoint; // Fix the principal point at the center
    bool flipVertical;           // Flip the captured images around the horizontal axis
    string outputFileName;       // The name of the file where to write
    bool showUndistorted;        // Show undistorted images after calibration
    string input;                // The input ->
    bool useFisheye;             // use fisheye camera model for calibration
    bool fixK1;                  // fix K1 distortion coefficient
    bool fixK2;                  // fix K2 distortion coefficient
    bool fixK3;                  // fix K3 distortion coefficient
    bool fixK4;                  // fix K4 distortion coefficient
    bool fixK5;                  // fix K5 distortion coefficient

    int cameraID;
    vector<string> imageList;
    size_t atImageList;
    VideoCapture inputCapture;
    InputType inputType;
    bool goodInput;
    int flag;

private:
    string patternToUse;
};

static inline void read(const FileNode& node, Settings& x, const Settings& default_value = Settings())
{
    if(node.empty())
        x = default_value;
    else
        x.read(node);
}

enum { DETECTION = 0, CAPTURING = 1, CALIBRATED = 2 };

bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints, float grid_width, bool release_object);

int main(int argc, char* argv[])
{
    const String keys
        = "{help h usage ? |           | print this message            }"
          "{@settings      |default.xml| input setting file            }"
          "{d              |           | actual distance between top-left and top-right corners of "
          "the calibration grid }"
          "{winSize        | 11        | Half of search window for cornerSubPix }";
    CommandLineParser parser(argc, argv, keys);
    parser.about("This is a camera calibration sample.\n"
                 "Usage: camera_calibration [configuration_file -- default ./default.xml]\n"
                 "Near the sample file you'll find the configuration file, which has detailed help of "
                 "how to edit it. It may be any OpenCV supported file format XML/YAML.");
    if (!parser.check()) {
        parser.printErrors();
        return 0;
    }

    if (parser.has("help")) {
        parser.printMessage();
        return 0;
    }

    //! [file_read]
    Settings s;
    const string inputSettingsFile = parser.get<string>(0);
    FileStorage fs(inputSettingsFile, FileStorage::READ); // Read the settings
    if (!fs.isOpened())
    {
        cout << "Could not open the configuration file: \"" << inputSettingsFile << "\"" << endl;
        parser.printMessage();
        return -1;
    }
    fs["Settings"] >> s;
    fs.release();                                         // close Settings file
    //! [file_read]

    if (!s.goodInput)
    {
        cout << "Invalid input detected. Application stopping. " << endl;
        return -1;
    }

    int winSize = parser.get<int>("winSize");  // half of the corner search window size

    float grid_width = s.squareSize * (s.boardSize.width - 1);
    if (s.calibrationPattern == Settings::Pattern::CHARUCOBOARD) {
        grid_width = s.squareSize * (s.boardSize.width - 2);
    }

    bool release_object = false;
    if (parser.has("d")) {
        grid_width = parser.get<float>("d");
        release_object = true;
    }

    // Create the ChArUco board object
    cv::aruco::Dictionary dictionary;
    // If the calibration pattern is CHARUCOBOARD, create the corresponding dictionary
    if (s.calibrationPattern == Settings::CHARUCOBOARD) {
        if (s.arucoDictFileName == "") {
            cv::aruco::PredefinedDictionaryType arucoDict;
            if (s.arucoDictName == "DICT_4X4_50") { arucoDict = cv::aruco::DICT_4X4_50; }
            else if (s.arucoDictName == "DICT_4X4_100") { arucoDict = cv::aruco::DICT_4X4_100; }
            else if (s.arucoDictName == "DICT_4X4_250") { arucoDict = cv::aruco::DICT_4X4_250; }
            else if (s.arucoDictName == "DICT_4X4_1000") { arucoDict = cv::aruco::DICT_4X4_1000; }
            else if (s.arucoDictName == "DICT_5X5_50") { arucoDict = cv::aruco::DICT_5X5_50; }
            else if (s.arucoDictName == "DICT_5X5_100") { arucoDict = cv::aruco::DICT_5X5_100; }
            else if (s.arucoDictName == "DICT_5X5_250") { arucoDict = cv::aruco::DICT_5X5_250; }
            else if (s.arucoDictName == "DICT_5X5_1000") { arucoDict = cv::aruco::DICT_5X5_1000; }
            else if (s.arucoDictName == "DICT_6X6_50") { arucoDict = cv::aruco::DICT_6X6_50; }
            else if (s.arucoDictName == "DICT_6X6_100") { arucoDict = cv::aruco::DICT_6X6_100; }
            else if (s.arucoDictName == "DICT_6X6_250") { arucoDict = cv::aruco::DICT_6X6_250; }
            else if (s.arucoDictName == "DICT_6X6_1000") { arucoDict = cv::aruco::DICT_6X6_1000; }
            else if (s.arucoDictName == "DICT_7X7_50") { arucoDict = cv::aruco::DICT_7X7_50; }
            else if (s.arucoDictName == "DICT_7X7_100") { arucoDict = cv::aruco::DICT_7X7_100; }
            else if (s.arucoDictName == "DICT_7X7_250") { arucoDict = cv::aruco::DICT_7X7_250; }
            else if (s.arucoDictName == "DICT_7X7_1000") { arucoDict = cv::aruco::DICT_7X7_1000; }
            else if (s.arucoDictName == "DICT_ARUCO_ORIGINAL") { arucoDict = cv::aruco::DICT_ARUCO_ORIGINAL; }
            else if (s.arucoDictName == "DICT_APRILTAG_16h5") { arucoDict = cv::aruco::DICT_APRILTAG_16h5; }
            else if (s.arucoDictName == "DICT_APRILTAG_25h9") { arucoDict = cv::aruco::DICT_APRILTAG_25h9; }
            else if (s.arucoDictName == "DICT_APRILTAG_36h10") { arucoDict = cv::aruco::DICT_APRILTAG_36h10; }
            else if (s.arucoDictName == "DICT_APRILTAG_36h11") { arucoDict = cv::aruco::DICT_APRILTAG_36h11; }
            else {
                cout << "incorrect name of aruco dictionary \n";
                return 1;
            }

            dictionary = cv::aruco::getPredefinedDictionary(arucoDict);
        }
        else {
            cv::FileStorage dict_file(s.arucoDictFileName, cv::FileStorage::Mode::READ);
            cv::FileNode fn(dict_file.root());
            dictionary.readDictionary(fn);
        }
    }
    else {
        // default dictionary
        dictionary = cv::aruco::getPredefinedDictionary(0);
    }

    // Create the CharucoBoard object and its detector
    cv::aruco::CharucoBoard ch_board({s.boardSize.width, s.boardSize.height}, s.squareSize, s.markerSize, dictionary);
    cv::aruco::CharucoDetector ch_detector(ch_board);
    std::vector<int> markerIds;

    vector<vector<Point2f> > imagePoints;
    Mat cameraMatrix, distCoeffs;
    Size imageSize;
    int mode = s.inputType == Settings::IMAGE_LIST ? CAPTURING : DETECTION;
    clock_t prevTimestamp = 0;
    const Scalar RED(0,0,255), GREEN(0,255,0);
    const char ESC_KEY = 27;

    //! [get_input]
    // Loop over all input images / frames
    for(;;)
    {
        Mat view;
        bool blinkOutput = false;

        view = s.nextImage();

        //----- If no more image, or got enough, then stop calibration and show result -------------
        if( mode == CAPTURING && imagePoints.size() >= (size_t)s.nrFrames )
        {
            // Run the calibration; on success switch to CALIBRATED mode, otherwise fall back to DETECTION
            if(runCalibrationAndSave(s, imageSize, cameraMatrix, distCoeffs, imagePoints, grid_width,
                                     release_object))
                mode = CALIBRATED;
            else
                mode = DETECTION;
        }
        if(view.empty())          // If there are no more images stop the loop
        {
            // if calibration threshold was not reached yet, calibrate now
            if( mode != CALIBRATED && !imagePoints.empty() )
                runCalibrationAndSave(s, imageSize, cameraMatrix, distCoeffs, imagePoints, grid_width,
                                      release_object);
            break;
        }
        //! [get_input]
        imageSize = view.size();  // Format input image.
        if( s.flipVertical )    flip( view, view, 0 );

        //! [find_pattern]
        vector<Point2f> pointBuf;

        bool found;

        int chessBoardFlags = CALIB_CB_ADAPTIVE_THRESH | CALIB_CB_NORMALIZE_IMAGE;

        if(!s.useFisheye) {
            // fast check erroneously fails with high distortions like fisheye
            chessBoardFlags |= CALIB_CB_FAST_CHECK;
        }

        switch( s.calibrationPattern ) // Find feature points on the input format
        {
        case Settings::CHESSBOARD:
            found = findChessboardCorners( view, s.boardSize, pointBuf, chessBoardFlags);
            break;
        case Settings::CHARUCOBOARD:
            ch_detector.detectBoard( view, pointBuf, markerIds);
            found = pointBuf.size() == (size_t)((s.boardSize.height - 1)*(s.boardSize.width - 1));
            break;
        case Settings::CIRCLES_GRID:
            found = findCirclesGrid( view, s.boardSize, pointBuf );
            break;
        case Settings::ASYMMETRIC_CIRCLES_GRID:
            found = findCirclesGrid( view, s.boardSize, pointBuf, CALIB_CB_ASYMMETRIC_GRID );
            break;
        default:
            found = false;
            break;
        }
        //! [find_pattern]

        //! [pattern_found]
        if (found)                // If done with success,
        {
            // improve the found corners' coordinate accuracy for chessboard
            if( s.calibrationPattern == Settings::CHESSBOARD)
            {
                Mat viewGray;
                cvtColor(view, viewGray, COLOR_BGR2GRAY);
                cornerSubPix( viewGray, pointBuf, Size(winSize,winSize),
                              Size(-1,-1), TermCriteria( TermCriteria::EPS+TermCriteria::COUNT, 30, 0.0001 ));
            }

            if( mode == CAPTURING &&  // For camera only take new samples after delay time
                (!s.inputCapture.isOpened() || clock() - prevTimestamp > s.delay*1e-3*CLOCKS_PER_SEC) )
            {
                imagePoints.push_back(pointBuf);
                prevTimestamp = clock();
                blinkOutput = s.inputCapture.isOpened();
            }

            // Draw the corners.
            if(s.calibrationPattern == Settings::CHARUCOBOARD)
                drawChessboardCorners( view, cv::Size(s.boardSize.width-1, s.boardSize.height-1), Mat(pointBuf), found );
            else
                drawChessboardCorners( view, s.boardSize, Mat(pointBuf), found );
        }
        //! [pattern_found]

        //----------------------------- Output Text ------------------------------------------------
        //! [output_text]
        string msg = (mode == CAPTURING) ? "100/100" :
                      mode == CALIBRATED ? "Calibrated" : "Press 'g' to start";
        int baseLine = 0;
        Size textSize = getTextSize(msg, 1, 1, 1, &baseLine);
        Point textOrigin(view.cols - 2*textSize.width - 10, view.rows - 2*baseLine - 10);

        if( mode == CAPTURING )
        {
            if(s.showUndistorted)
                msg = cv::format( "%d/%d Undist", (int)imagePoints.size(), s.nrFrames );
            else
                msg = cv::format( "%d/%d", (int)imagePoints.size(), s.nrFrames );
        }

        putText( view, msg, textOrigin, 1, 1, mode == CALIBRATED ? GREEN : RED);

        if( blinkOutput )
            bitwise_not(view, view);
        //! [output_text]

        //------------------------- Video capture output undistorted ------------------------------
        //! [output_undistorted]
        if( mode == CALIBRATED && s.showUndistorted )
        {
            Mat temp = view.clone();
            if (s.useFisheye)
            {
                Mat newCamMat;
                fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
                                                                    Matx33d::eye(), newCamMat, 1);
                cv::fisheye::undistortImage(temp, view, cameraMatrix, distCoeffs, newCamMat);
            }
            else
                undistort(temp, view, cameraMatrix, distCoeffs);
        }
        //! [output_undistorted]

        //------------------------------ Show image and check for input commands -------------------
        //! [await_input]
        imshow("Image View", view);
        char key = (char)waitKey(s.inputCapture.isOpened() ? 50 : s.delay);

        if( key == ESC_KEY )
            break;

        if( key == 'u' && mode == CALIBRATED )
            s.showUndistorted = !s.showUndistorted;

        if( s.inputCapture.isOpened() && key == 'g' )
        {
            mode = CAPTURING;
            imagePoints.clear();
        }
        //! [await_input]
    }

    // -----------------------Show the undistorted image for the image list ------------------------
    //! [show_results]
    if( s.inputType == Settings::IMAGE_LIST && s.showUndistorted && !cameraMatrix.empty())
    {
        Mat view, rview, map1, map2;

        if (s.useFisheye)   // Use the fisheye model for undistortion
        {
            Mat newCamMat;  // the new camera matrix
            // Estimate the new camera matrix needed for undistortion and rectification
            fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
                                                                Matx33d::eye(), newCamMat, 1);
            // Initialize the undistortion and rectification maps
            fisheye::initUndistortRectifyMap(cameraMatrix, distCoeffs, Matx33d::eye(), newCamMat, imageSize,
                                             CV_16SC2, map1, map2);
        }
        else
        {
            initUndistortRectifyMap(
                cameraMatrix, distCoeffs, Mat(),
                getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0), imageSize,
                CV_16SC2, map1, map2);
        }

        for(size_t i = 0; i < s.imageList.size(); i++ )
        {
            view = imread(s.imageList[i], IMREAD_COLOR);
            if(view.empty())
                continue;
            remap(view, rview, map1, map2, INTER_LINEAR);
            imshow("Image View", rview);
            char c = (char)waitKey();
            if( c == ESC_KEY || c == 'q' || c == 'Q' )
                break;
        }
    }
    //! [show_results]

    return 0;
}

//! [compute_errors]
// Compute the reprojection errors
static double computeReprojectionErrors( const vector<vector<Point3f> >& objectPoints,
                                         const vector<vector<Point2f> >& imagePoints,
                                         const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                                         const Mat& cameraMatrix , const Mat& distCoeffs,
                                         vector<float>& perViewErrors, bool fisheye)
{
    vector<Point2f> imagePoints2;
    size_t totalPoints = 0;
    double totalErr = 0, err;
    perViewErrors.resize(objectPoints.size());

    for(size_t i = 0; i < objectPoints.size(); ++i )
    {
        if (fisheye)   // For the fisheye model, project the points with the fisheye:: functions
        {
            fisheye::projectPoints(objectPoints[i], imagePoints2, rvecs[i], tvecs[i], cameraMatrix,
                                   distCoeffs);
        }
        else
        {
            projectPoints(objectPoints[i], rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
        }
        err = norm(imagePoints[i], imagePoints2, NORM_L2);

        size_t n = objectPoints[i].size();
        perViewErrors[i] = (float) std::sqrt(err*err/n);
        totalErr        += err*err;
        totalPoints     += n;
    }

    return std::sqrt(totalErr/totalPoints);
}
//! [compute_errors]

//! [board_corners]
// Compute the board corner positions
static void calcBoardCornerPositions(Size boardSize, float squareSize, vector<Point3f>& corners,
                                     Settings::Pattern patternType /*= Settings::CHESSBOARD*/)
{
    corners.clear();

    switch(patternType)
    {
    case Settings::CHESSBOARD:       // standard chessboard
    case Settings::CIRCLES_GRID:     // circle grid
        for (int i = 0; i < boardSize.height; ++i) {
            for (int j = 0; j < boardSize.width; ++j) {
                corners.push_back(Point3f(j*squareSize, i*squareSize, 0));
            }
        }
        break;
    case Settings::CHARUCOBOARD:     // ChArUco board
        for (int i = 0; i < boardSize.height - 1; ++i) {
            for (int j = 0; j < boardSize.width - 1; ++j) {
                corners.push_back(Point3f(j*squareSize, i*squareSize, 0));
            }
        }
        break;
    case Settings::ASYMMETRIC_CIRCLES_GRID:   // asymmetric circle grid
        for (int i = 0; i < boardSize.height; i++) {
            for (int j = 0; j < boardSize.width; j++) {
                corners.push_back(Point3f((2 * j + i % 2)*squareSize, i*squareSize, 0));
            }
        }
        break;
    default:
        break;
    }
}
//! [board_corners]

static bool runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                            vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs,
                            vector<float>& reprojErrs, double& totalAvgErr, vector<Point3f>& newObjPoints,
                            float grid_width, bool release_object)
{
    //! [fixed_aspect]
    cameraMatrix = Mat::eye(3, 3, CV_64F);
    if( !s.useFisheye && s.flag & CALIB_FIX_ASPECT_RATIO )
        cameraMatrix.at<double>(0,0) = s.aspectRatio;
    //! [fixed_aspect]
    if (s.useFisheye) {
        distCoeffs = Mat::zeros(4, 1, CV_64F);
    } else {
        distCoeffs = Mat::zeros(8, 1, CV_64F);
    }

    vector<vector<Point3f> > objectPoints(1);
    calcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0], s.calibrationPattern);
    if (s.calibrationPattern == Settings::Pattern::CHARUCOBOARD) {
        objectPoints[0][s.boardSize.width - 2].x = objectPoints[0][0].x + grid_width;
    } else {
        objectPoints[0][s.boardSize.width - 1].x = objectPoints[0][0].x + grid_width;
    }
    newObjPoints = objectPoints[0];

    objectPoints.resize(imagePoints.size(),objectPoints[0]);

    //Find intrinsic and extrinsic camera parameters
    double rms;

    if (s.useFisheye) {
        Mat _rvecs, _tvecs;
        rms = fisheye::calibrate(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, _rvecs,
                                 _tvecs, s.flag);

        rvecs.reserve(_rvecs.rows);
        tvecs.reserve(_tvecs.rows);
        for(int i = 0; i < int(objectPoints.size()); i++){
            rvecs.push_back(_rvecs.row(i));
            tvecs.push_back(_tvecs.row(i));
        }
    } else {
        int iFixedPoint = -1;
        if (release_object)
            iFixedPoint = s.boardSize.width - 1;
        rms = calibrateCameraRO(objectPoints, imagePoints, imageSize, iFixedPoint,
                                cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints,
                                s.flag | CALIB_USE_LU);
    }

    if (release_object) {
        cout << "New board corners: " << endl;
        cout << newObjPoints[0] << endl;
        cout << newObjPoints[s.boardSize.width - 1] << endl;
        cout << newObjPoints[s.boardSize.width * (s.boardSize.height - 1)] << endl;
        cout << newObjPoints.back() << endl;
    }

    cout << "Re-projection error reported by calibrateCamera: "<< rms << endl;

    bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);

    objectPoints.clear();
    objectPoints.resize(imagePoints.size(), newObjPoints);
    totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix,
                                            distCoeffs, reprojErrs, s.useFisheye);

    return ok;
}

// Print camera parameters to the output file
static void saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                              const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                              const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
                              double totalAvgErr, const vector<Point3f>& newObjPoints )
{
    FileStorage fs( s.outputFileName, FileStorage::WRITE );

    time_t tm;
    time( &tm );
    struct tm *t2 = localtime( &tm );
    char buf[1024];
    strftime( buf, sizeof(buf), "%c", t2 );

    fs << "calibration_time" << buf;

    if( !rvecs.empty() || !reprojErrs.empty() )
        fs << "nr_of_frames" << (int)std::max(rvecs.size(), reprojErrs.size());
    fs << "image_width" << imageSize.width;
    fs << "image_height" << imageSize.height;
    fs << "board_width" << s.boardSize.width;
    fs << "board_height" << s.boardSize.height;
    fs << "square_size" << s.squareSize;
    fs << "marker_size" << s.markerSize;

    if( !s.useFisheye && s.flag & CALIB_FIX_ASPECT_RATIO )
        fs << "fix_aspect_ratio" << s.aspectRatio;

    if (s.flag)
    {
        std::stringstream flagsStringStream;
        if (s.useFisheye)
        {
            flagsStringStream << "flags:"
                << (s.flag & fisheye::CALIB_FIX_SKEW ? " +fix_skew" : "")
                << (s.flag & fisheye::CALIB_FIX_K1 ? " +fix_k1" : "")
                << (s.flag & fisheye::CALIB_FIX_K2 ? " +fix_k2" : "")
                << (s.flag & fisheye::CALIB_FIX_K3 ? " +fix_k3" : "")
                << (s.flag & fisheye::CALIB_FIX_K4 ? " +fix_k4" : "")
                << (s.flag & fisheye::CALIB_RECOMPUTE_EXTRINSIC ? " +recompute_extrinsic" : "");
        }
        else
        {
            flagsStringStream << "flags:"
                << (s.flag & CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "")
                << (s.flag & CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "")
                << (s.flag & CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "")
                << (s.flag & CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "")
                << (s.flag & CALIB_FIX_K1 ? " +fix_k1" : "")
                << (s.flag & CALIB_FIX_K2 ? " +fix_k2" : "")
                << (s.flag & CALIB_FIX_K3 ? " +fix_k3" : "")
                << (s.flag & CALIB_FIX_K4 ? " +fix_k4" : "")
                << (s.flag & CALIB_FIX_K5 ? " +fix_k5" : "");
        }
        fs.writeComment(flagsStringStream.str());
    }

    fs << "flags" << s.flag;

    fs << "fisheye_model" << s.useFisheye;

    fs << "camera_matrix" << cameraMatrix;
    fs << "distortion_coefficients" << distCoeffs;

    fs << "avg_reprojection_error" << totalAvgErr;
    if (s.writeExtrinsics && !reprojErrs.empty())
        fs << "per_view_reprojection_errors" << Mat(reprojErrs);

    if(s.writeExtrinsics && !rvecs.empty() && !tvecs.empty() )
    {
        CV_Assert(rvecs[0].type() == tvecs[0].type());
        Mat bigmat((int)rvecs.size(), 6, CV_MAKETYPE(rvecs[0].type(), 1));
        bool needReshapeR = rvecs[0].depth() != 1 ? true : false;
        bool needReshapeT = tvecs[0].depth() != 1 ? true : false;

        for( size_t i = 0; i < rvecs.size(); i++ )
        {
            Mat r = bigmat(Range(int(i), int(i+1)), Range(0,3));
            Mat t = bigmat(Range(int(i), int(i+1)), Range(3,6));

            if(needReshapeR)
                rvecs[i].reshape(1, 1).copyTo(r);
            else
            {
                //*.t() is MatExpr (not Mat) so we can use assignment operator
                CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
                r = rvecs[i].t();
            }

            if(needReshapeT)
                tvecs[i].reshape(1, 1).copyTo(t);
            else
            {
                CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
                t = tvecs[i].t();
            }
        }
        fs.writeComment("a set of 6-tuples (rotation vector + translation vector) for each view");
        fs << "extrinsic_parameters" << bigmat;
    }

    if(s.writePoints && !imagePoints.empty() )
    {
        Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
        for( size_t i = 0; i < imagePoints.size(); i++ )
        {
            Mat r = imagePtMat.row(int(i)).reshape(2, imagePtMat.cols);
            Mat imgpti(imagePoints[i]);
            imgpti.copyTo(r);
        }
        fs << "image_points" << imagePtMat;
    }

    if( s.writeGrid && !newObjPoints.empty() )
    {
        fs << "grid_points" << newObjPoints;
    }
}

//! [run_and_save]
bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints, float grid_width, bool release_object)
{
    vector<Mat> rvecs, tvecs;
    vector<float> reprojErrs;
    double totalAvgErr = 0;
    vector<Point3f> newObjPoints;

    bool ok = runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
                             totalAvgErr, newObjPoints, grid_width, release_object);
    cout << (ok ? "Calibration succeeded" : "Calibration failed")
         << ". avg re projection error = " << totalAvgErr << endl;

    if (ok)
        saveCameraParams(s, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, reprojErrs, imagePoints,
                         totalAvgErr, newObjPoints);
    return ok;
}
//! [run_and_save]
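To reuse the result in another program, the output file can simply be read back with cv::FileStorage. A minimal sketch follows; the keys camera_matrix, distortion_coefficients and extrinsic_parameters are the ones saveCameraParams() writes, and test.jpg is a hypothetical input image.

#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <iostream>

int main()
{
    cv::FileStorage fs("out_camera_data.xml", cv::FileStorage::READ);
    cv::Mat cameraMatrix, distCoeffs, extrinsics;
    fs["camera_matrix"] >> cameraMatrix;
    fs["distortion_coefficients"] >> distCoeffs;
    fs["extrinsic_parameters"] >> extrinsics;    // N x 6 matrix: [rvec | tvec] per view
    if (cameraMatrix.empty() || distCoeffs.empty())
        return -1;

    // Undistort a new image taken with the same camera (hypothetical file name).
    cv::Mat img = cv::imread("test.jpg");
    if (!img.empty())
    {
        cv::Mat undistorted;
        cv::undistort(img, undistorted, cameraMatrix, distCoeffs);
        cv::imwrite("test_undistorted.jpg", undistorted);
    }

    // Unpack the board pose for each calibration view.
    for (int i = 0; i < extrinsics.rows; ++i)
    {
        cv::Mat rvec = extrinsics.row(i).colRange(0, 3);  // Rodrigues rotation vector
        cv::Mat tvec = extrinsics.row(i).colRange(3, 6);  // translation vector
        cv::Mat R;
        cv::Rodrigues(rvec, R);                           // 3x3 rotation of the board in this view
        std::cout << "view " << i << ": t = " << tvec << std::endl;
    }
    return 0;
}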
Note: this calibration example ships with OpenCV, so you can find it in the OpenCV samples yourself, or download it from my blog: https://download.csdn.net/download/jppdss/89046059