
LVI-SAM mapOptimization Code Walkthrough


This article walks through the graph-optimization part of the code. Graph optimization and Kalman filtering have become the two mainstream approaches to back-end optimization.

Headers

  1. // Headers, largely the same as in imuPreintegration
  2. #include "utility.h"
  3. #include "lvi_sam/cloud_info.h"
  4. #include <gtsam/geometry/Rot3.h>
  5. #include <gtsam/geometry/Pose3.h>
  6. #include <gtsam/slam/PriorFactor.h>
  7. #include <gtsam/slam/BetweenFactor.h>
  8. #include <gtsam/navigation/GPSFactor.h>
  9. #include <gtsam/navigation/ImuFactor.h>
  10. #include <gtsam/navigation/CombinedImuFactor.h>
  11. #include <gtsam/nonlinear/NonlinearFactorGraph.h>
  12. #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
  13. #include <gtsam/nonlinear/Marginals.h>
  14. #include <gtsam/nonlinear/Values.h>
  15. #include <gtsam/inference/Symbol.h>
  16. #include <gtsam/nonlinear/ISAM2.h>
  17. // Using the gtsam namespace, so the gtsam:: prefix does not have to be written out everywhere
  18. using namespace gtsam;
  19. // X denotes a pose (position + orientation) and V a velocity; they are used later as keys, e.g. V(0)
  20. using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
  21. using symbol_shorthand::V; // Vel (xdot,ydot,zdot)
  22. using symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz)
  23. using symbol_shorthand::G; // GPS pose

These headers are almost the same as in imuPreintegration, plus a few GTSAM headers that the preintegration node simply does not need (which is why it does not include them). The using namespace gtsam; line means the gtsam:: prefix can be dropped. Finally, X, V, B and G are just named key generators: as used further down, X(0), V(0) and so on produce the keys under which pose, velocity, bias and GPS variables are stored in the factor graph.
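
As a quick illustration, here is a minimal sketch (not taken from LVI-SAM itself; the values and noise numbers are made up) of how such shorthand keys are typically used when inserting variables and factors into a GTSAM graph:

  // Minimal sketch of symbol_shorthand usage, independent of the LVI-SAM code.
  #include <gtsam/geometry/Pose3.h>
  #include <gtsam/inference/Symbol.h>
  #include <gtsam/nonlinear/NonlinearFactorGraph.h>
  #include <gtsam/nonlinear/Values.h>
  #include <gtsam/slam/PriorFactor.h>

  using gtsam::symbol_shorthand::X;   // X(i) builds the key of pose variable i

  void tinyGraphExample()
  {
      gtsam::NonlinearFactorGraph graph;
      gtsam::Values initial;

      gtsam::Pose3 firstPose;  // identity pose for keyframe 0
      auto priorNoise = gtsam::noiseModel::Diagonal::Variances(
          (gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-4, 1e-4, 1e-4).finished());

      // the variable behind key X(0) is constrained by a prior factor ...
      graph.add(gtsam::PriorFactor<gtsam::Pose3>(X(0), firstPose, priorNoise));
      // ... and given an initial value under the same key
      initial.insert(X(0), firstPose);
  }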

Variables

  1. /*
  2. * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw]; intensity is the time stamp)
  3. * The comment above helpfully tells the reader that intensity holds the time stamp
  4. */
  5. struct PointXYZIRPYT
  6. {
  7. PCL_ADD_POINT4D // xyz + padding (the preferred way to add the padding)
  8. PCL_ADD_INTENSITY;
  9. float roll;
  10. float pitch;
  11. float yaw;
  12. double time;
  13. EIGEN_MAKE_ALIGNED_OPERATOR_NEW // ensures Eigen-compatible memory alignment, as discussed earlier
  14. } EIGEN_ALIGN16;
  15. // Register the struct as a PCL point type (PCL defines its own point formats; a custom struct must be registered before use)
  16. POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,
  17. (float, x, x) (float, y, y)
  18. (float, z, z) (float, intensity, intensity)
  19. (float, roll, roll) (float, pitch, pitch) (float, yaw, yaw)
  20. (double, time, time))
  21. typedef PointXYZIRPYT PointTypePose; // shorter alias
  22. // The main class, again publicly inheriting from ParamServer
  23. class mapOptimization : public ParamServer
  24. {
  25. public:
  26. // gtsam
  27. NonlinearFactorGraph gtSAMgraph; // factor graph holding the nonlinear factors
  28. // Values holds the values of a set of variables in the factor graph
  29. Values initialEstimate;
  30. Values optimizedEstimate;
  31. // the optimizer
  32. ISAM2 *isam;
  33. Values isamCurrentEstimate;
  34. Eigen::MatrixXd poseCovariance; // pose covariance
  35. // publishers
  36. // these live in the ROS domain, i.e. their output can be displayed in rviz
  37. ros::Publisher pubLaserCloudSurround; // point cloud: feature cloud of the keyframe map
  38. ros::Publisher pubOdomAftMappedROS; // odometry: lidar odometry after mapping
  39. ros::Publisher pubKeyPoses; // point cloud: key poses
  40. ros::Publisher pubPath; // path, mainly for visualization in rviz
  41. ros::Publisher pubHistoryKeyFrames; // point cloud: historical keyframes
  42. ros::Publisher pubIcpKeyFrames; // point cloud: current keyframe features after the loop-closure-corrected pose transform (ICP-aligned)
  43. ros::Publisher pubRecentKeyFrames; // point cloud: downsampled surface points of the local map
  44. ros::Publisher pubRecentKeyFrame; // point cloud: downsampled corner and surface point sets of the historical frames
  45. ros::Publisher pubCloudRegisteredRaw; // point cloud: raw cloud after registration
  46. ros::Publisher pubLoopConstraintEdge; // MarkerArray: loop-closure edges
  47. // inputs: lidar cloud info, GPS, loop-closure detections
  48. ros::Subscriber subLaserCloudInfo; // current lidar frame cloud info, from featureExtraction
  49. ros::Subscriber subGPS; // GPS odometry
  50. ros::Subscriber subLoopInfo; // loop-closure data from an external loop-detection module (not produced by this node)
  51. std::deque<nav_msgs::Odometry> gpsQueue;
  52. lvi_sam::cloud_info cloudInfo; // custom message defined by the authors, see lvi_sam/cloud_info.h
  53. // corner clouds of all historical keyframes (downsampled)
  54. vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;
  55. // surface clouds of all historical keyframes (downsampled)
  56. vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;
  57. // historical keyframe poses
  58. pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;
  59. pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;
  60. pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization
  61. pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization
  62. // the DS suffix stands for downsampled: these hold the clouds above after downsampling
  63. pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner feature set from odoOptimization
  64. pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf feature set from odoOptimization
  65. pcl::PointCloud<PointType>::Ptr laserCloudOri;
  66. pcl::PointCloud<PointType>::Ptr coeffSel; // coeffSel: selected coefficients
  67. std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation
  68. std::vector<PointType> coeffSelCornerVec;
  69. std::vector<bool> laserCloudOriCornerFlag;
  70. std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation
  71. std::vector<PointType> coeffSelSurfVec;
  72. std::vector<bool> laserCloudOriSurfFlag;
  73. pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;
  74. pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;
  75. pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;
  76. pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;
  77. // map clouds built from the nearby keyframes, with the matching kd-trees used by scan-to-map to find neighbors
  78. pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;
  79. pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;
  80. pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;
  81. pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;
  82. pcl::PointCloud<PointType>::Ptr latestKeyFrameCloud;
  83. pcl::PointCloud<PointType>::Ptr nearHistoryKeyFrameCloud;
  84. // downsampling filters
  85. pcl::VoxelGrid<PointType> downSizeFilterCorner;
  86. pcl::VoxelGrid<PointType> downSizeFilterSurf;
  87. pcl::VoxelGrid<PointType> downSizeFilterICP;
  88. pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization
  89. ros::Time timeLaserInfoStamp;
  90. double timeLaserInfoCur;
  91. float transformTobeMapped[6];
  92. std::mutex mtx; // lock protecting the shared data
  93. bool isDegenerate = false; // whether the optimization problem is degenerate
  94. cv::Mat matP; // projection matrix used when the problem is degenerate (see LMOptimization)
  95. // number of corner points in the current frame (after downsampling)
  96. int laserCloudCornerLastDSNum = 0;
  97. // number of surface points in the current frame (after downsampling)
  98. int laserCloudSurfLastDSNum = 0;
  99. bool aLoopIsClosed = false;
  100. int imuPreintegrationResetId = 0;
  101. // the trajectory message
  102. nav_msgs::Path globalPath;
  103. // current frame pose as an affine transform
  104. Eigen::Affine3f transPointAssociateToMap;
  105. map<int, int> loopIndexContainer; // from new to old
  106. vector<pair<int, int>> loopIndexQueue;
  107. vector<gtsam::Pose3> loopPoseQueue;
  108. vector<gtsam::noiseModel::Diagonal::shared_ptr> loopNoiseQueue;

The listing starts by defining a struct that holds xyz, roll/pitch/yaw and a time stamp. EIGEN_MAKE_ALIGNED_OPERATOR_NEW enforces Eigen's memory alignment, as covered before. The struct is then registered as a PCL point type (PCL ships its own point formats, and a custom struct has to be registered before it can be used; the macro is worth keeping as a template) and finally given the shorter alias PointTypePose. Below that are the members of the mapOptimization class, which publicly inherits from ParamServer. They are a long list of values that the inline comments describe one by one, so the main thing is to keep their scopes and roles apart. Note also the std::mutex mtx guarding shared data, and a number of helper variables that are only used by individual functions further down.
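
A small sketch of what the registration buys (values invented): once POINT_CLOUD_REGISTER_POINT_STRUCT has run, PointTypePose works with the ordinary PCL templates, and the intensity field is free to carry whatever is convenient — further down, the key-pose clouds reuse it as the keyframe index.

  // Sketch only: using the registered PointTypePose like any built-in PCL type.
  pcl::PointCloud<PointTypePose>::Ptr poses(new pcl::PointCloud<PointTypePose>());

  PointTypePose p;
  p.x = 1.0f; p.y = 2.0f; p.z = 0.0f;
  p.roll = 0.0f; p.pitch = 0.0f; p.yaw = 0.5f;
  p.intensity = poses->size();   // reused as the keyframe index, as in the code below
  p.time = 1234.5;               // time stamp in seconds
  poses->push_back(p);

  // works because the type is registered with PCL
  pcl::io::savePCDFileBinary("poses.pcd", *poses);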

Functions

  1. // constructor
  2. mapOptimization()
  3. {
  4. ISAM2Params parameters; // parameters of the ISAM2 optimizer
  5. // set the parameters
  6. parameters.relinearizeThreshold = 0.1;
  7. parameters.relinearizeSkip = 1;
  8. // initialize the optimizer
  9. isam = new ISAM2(parameters);
  10. // publish the historical keyframe poses
  11. pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/trajectory", 1); // the trailing 1 is the outgoing message queue size
  12. // publish the feature cloud of the local keyframe map
  13. pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/map_global", 1);
  14. // publish the lidar odometry, shown in rviz as a coordinate axis
  15. pubOdomAftMappedROS = nh.advertise<nav_msgs::Odometry> (PROJECT_NAME + "/lidar/mapping/odometry", 1);
  16. // publish the path
  17. pubPath = nh.advertise<nav_msgs::Path> (PROJECT_NAME + "/lidar/mapping/path", 1);
  18. // subscribe to the current lidar frame's cloud info, from featureExtraction
  19. subLaserCloudInfo = nh.subscribe<lvi_sam::cloud_info> (PROJECT_NAME + "/lidar/feature/cloud_info", 5, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());
  20. // subscribe to GPS odometry; it can be ignored at first when studying the graph optimization, but adding GPS improves accuracy
  21. subGPS = nh.subscribe<nav_msgs::Odometry> (gpsTopic, 50, &mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay());
  22. // subscribe to loop-closure candidates provided by the external (visual) loop detector
  23. subLoopInfo = nh.subscribe<std_msgs::Float64MultiArray>(PROJECT_NAME + "/vins/loop/match_frame", 5, &mapOptimization::loopHandler, this, ros::TransportHints().tcpNoDelay());
  24. // publish the local map of the matched loop-closure keyframe
  25. pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/loop_closure_history_cloud", 1);
  26. // publish the current keyframe features after applying the loop-closure-corrected pose
  27. pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/loop_closure_corrected_cloud", 1);
  28. // publish the loop-closure edges, shown in rviz as lines between loop-closing frames
  29. pubLoopConstraintEdge = nh.advertise<visualization_msgs::MarkerArray>(PROJECT_NAME + "/lidar/mapping/loop_closure_constraints", 1);
  30. // publish the downsampled surface points of the local map
  31. pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/map_local", 1);
  32. // publish the downsampled corner and surface point sets of the historical frames
  33. pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/cloud_registered", 1);
  34. // publish the current frame's raw point cloud after registration
  35. pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>(PROJECT_NAME + "/lidar/mapping/cloud_registered_raw", 1);
  36. // set the voxel size (length, width, height) of each downsampling filter
  37. downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize); // all three sides equal, so each voxel is a cube, as seen before
  38. downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
  39. downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
  40. // for the surrounding key poses of the scan-to-map optimization
  41. downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization
  42. // initialize the member variables
  43. allocateMemory();
  44. }

The constructor first sets the ISAM2 parameters and creates the optimizer, then wires up the publishers (advertise) and subscribers (subscribe). Each subscriber is given a callback, which runs automatically whenever a message arrives on its topic; what those callbacks do is covered below. After that the voxel sizes of the downsampling filters are set: think of a cube of the given side length placed over the cloud, with all points inside one cube represented by a single point. Finally, allocateMemory() initializes the member variables, which is the first function examined below.
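
A minimal sketch of what setLeafSize means (the 0.2 m value is made up, not one of the project's parameters): every occupied 0.2 x 0.2 x 0.2 m cube of space ends up represented by the centroid of the points inside it.

  #include <pcl/filters/voxel_grid.h>

  pcl::VoxelGrid<pcl::PointXYZI> voxelFilter;
  voxelFilter.setLeafSize(0.2f, 0.2f, 0.2f);   // cube side length in metres

  pcl::PointCloud<pcl::PointXYZI>::Ptr input(new pcl::PointCloud<pcl::PointXYZI>());
  pcl::PointCloud<pcl::PointXYZI>::Ptr output(new pcl::PointCloud<pcl::PointXYZI>());
  // ... fill input ...
  voxelFilter.setInputCloud(input);
  voxelFilter.filter(*output);                 // output has at most one point per occupied voxel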

  1. // allocate memory
  2. void allocateMemory()
  3. {
  4. // initialize the keyframe-related members declared above
  5. // the type passed to reset(...) must match the declared type of the smart pointer
  6. cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
  7. cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
  8. kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
  9. kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
  10. laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization
  11. laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization
  12. laserCloudCornerLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled corner feature set from odoOptimization
  13. laserCloudSurfLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled surf feature set from odoOptimization
  14. laserCloudOri.reset(new pcl::PointCloud<PointType>());
  15. coeffSel.reset(new pcl::PointCloud<PointType>()); // selected coefficients; which coefficients these are becomes clear in the optimization functions below
  16. laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);
  17. coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);
  18. laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);
  19. laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);
  20. coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);
  21. laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);
  22. // my understanding: N_SCAN is the number of horizontal scan rings (16 here); looking at e.g. a tree in the live point cloud, the layering of the rings seems to confirm this
  23. std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false); // fill every flag with false
  24. std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
  25. laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());
  26. laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());
  27. laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());
  28. laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());
  29. kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());
  30. kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());
  31. latestKeyFrameCloud.reset(new pcl::PointCloud<PointType>());
  32. nearHistoryKeyFrameCloud.reset(new pcl::PointCloud<PointType>());
  33. for (int i = 0; i < 6; ++i)
  34. {
  35. transformTobeMapped[i] = 0;
  36. }
  37. matP = cv::Mat(6, 6, CV_32F, cv::Scalar::all(0));
  38. }

Not much to it: the reset, resize and fill calls simply (re)initialize the members of this module. My reading of N_SCAN * Horizon_SCAN is rows times columns: the lidar scans in horizontal rings, the points are stored ring by ring, and knowing that row-major layout is the key to converting a (ring, column) pair into a one-dimensional index. std::fill does what its name suggests (it sets every element of the flag vectors to false); its details can be studied on a second read. The only rule to keep in mind is that the type passed to reset() must match the declared pointer type.
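
A small sketch of that row-by-row layout (the sizes are typical values assumed here, not read from the config):

  const int N_SCAN = 16;           // number of rings (rows)
  const int Horizon_SCAN = 1800;   // number of points per ring (columns)

  int ring = 3, column = 250;                    // a point on ring 3
  int flatIndex = ring * Horizon_SCAN + column;  // its position in an N_SCAN * Horizon_SCAN vector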

  1. // callback for the lidar cloud info; it was registered in the constructor's subscribe call and fires whenever a new message arrives
  2. void laserCloudInfoHandler(const lvi_sam::cloud_infoConstPtr& msgIn)
  3. {
  4. // extract time stamp
  5. timeLaserInfoStamp = msgIn->header.stamp;
  6. timeLaserInfoCur = msgIn->header.stamp.toSec(); // the same time stamp in two forms: this one converted to seconds, the one above kept as a ros::Time
  7. // extract info and feature cloud: the corner and surface point sets of the current lidar frame
  8. cloudInfo = *msgIn;
  9. pcl::fromROSMsg(msgIn->cloud_corner, *laserCloudCornerLast);
  10. pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);
  11. // take the lock (synchronization)
  12. std::lock_guard<std::mutex> lock(mtx);
  13. static double timeLastProcessing = -1;
  14. // control of the mapping execution rate
  15. // only proceed if the time elapsed since the last processed frame exceeds the threshold
  16. // mappingProcessInterval is initialized to 0.15 in utility.h
  17. if (timeLaserInfoCur - timeLastProcessing >= mappingProcessInterval)
  18. {
  19. // update timeLastProcessing
  20. timeLastProcessing = timeLaserInfoCur;
  21. // initialize the pose of the current frame
  22. // 1. for the very first frame, initialize the rotation from the RPY of the raw IMU data
  23. // 2. for later frames, use the odometry increment between the two frames, applied to the previous lidar pose, to get the current pose
  24. // my understanding: the pose is predicted either from VINS or from the IMU, by multiplying the corresponding transforms together
  25. updateInitialGuess();
  26. // build the local map: extract the surrounding corner and surface clouds
  27. // 1. around the most recent keyframe, search for keyframes adjacent in space and time, and downsample them
  28. // 2. for each keyframe in that set, add its corner and surface points to the local map
  29. // internally it runs extractNearby and then extractCloud, i.e. it processes the surrounding keyframes
  30. extractSurroundingKeyFrames();
  31. // downsample the corner and surface point sets of the current frame
  32. // the same downsampling pattern applied twice (corners and surfaces); nothing special here
  33. downsampleCurrentScan();
  34. // 1. only runs the optimization if the current frame has enough feature points and enough of them find matches
  35. // 2. up to 30 optimization iterations
  36. //    1) corner points: find matches in the local map
  37. //       a. with the current pose estimate, transform each corner point into the map frame and look up its 5 nearest neighbors in the local map; if they are within 1 m and form a line
  38. //          (judged from the eigenvalues of the covariance about their center), the point counts as matched
  39. //       b. compute the point-to-line distance and the unit vector along the perpendicular, and store them as the corner coefficients
  40. //    2) surface points: find matches in the local map
  41. //       a. with the current pose estimate, transform each surface point into the map frame and look up its 5 nearest neighbors; if they are within 1 m and form a plane
  42. //          (least-squares plane fit), the point counts as matched
  43. //       b. compute the point-to-plane distance and the unit normal, and store them as the surface coefficients
  44. //    3) collect the matched corner and surface points into one set
  45. //    4) build the Jacobian of the matched points, with the point-to-line / point-to-plane distances as observations, solve the Gauss-Newton system and iterate, storing the pose in transformTobeMapped
  46. // 3. finally, fuse the raw IMU RPY with the scan-to-map result (weighted), updating roll and pitch and constraining z
  47. scan2MapOptimization();
  48. // factor-graph optimization, updating all keyframe poses
  49. // decide whether the current frame becomes a keyframe and run the factor-graph optimization
  50. // if the pose change relative to the previous keyframe is too small the frame is skipped, otherwise it becomes a keyframe
  51. // add the lidar odometry factor, the GPS factor and any loop-closure factors
  52. // run the factor-graph optimization
  53. // read back the optimized pose and pose covariance of the current frame
  54. // append to cloudKeyPoses3D / cloudKeyPoses6D, update transformTobeMapped, and store the keyframe's corner and surface clouds
  55. // this optimization is separate from the scan-to-map optimization above
  56. saveKeyFramesAndFactor();
  57. // update the poses of all variable nodes in the factor graph, i.e. all historical keyframe poses, and refresh the odometry trajectory
  58. correctPoses();
  59. // publish the lidar odometry
  60. publishOdometry();
  61. // publish odometry, point clouds and the trajectory:
  62. // 1. the historical keyframe pose set
  63. // 2. the downsampled surface points of the local map
  64. // 3. the downsampled corner and surface points of the (accumulated) frames
  65. // 4. the odometry trajectory
  66. publishFrames();
  67. }
  68. }

The lidar cloud callback, which of course fires whenever a new cloud message is received. It first records the time stamps, then pulls the feature clouds out of the ROS message, takes the lock, and finally runs the sequence of functions above at a limited rate; what each of them does is written in the comments (which, admittedly, are distilled from many write-ups found online).

laserCloudInfoHandler calls the following functions in order:

1. updateInitialGuess(): initializes the pose estimate for the current frame ("guess" here means initial guess, not anything Gaussian).

2. extractSurroundingKeyFrames(): extracts the keyframes surrounding the current one and builds the local map from them.

3. downsampleCurrentScan(): downsamples the current scan, since the previous step would otherwise leave more points than is practical to process.

4. scan2MapOptimization(): the part I understand least so far. Roughly, cornerOptimization and surfOptimization find the corner and surface correspondences and compute the coeff values used afterwards, combineOptimizationCoeffs merges them into one set, and LMOptimization then iterates, checking two deltas (rotation and translation) against their thresholds until the pose converges. Some of the variables inside are still unclear to me; they presumably come from the original papers or from background material I have not read. A small sketch of the point-to-line distance this step relies on follows this list.

5. saveKeyFramesAndFactor(): adds the factors and saves the new keyframe.

6. correctPoses(): corrects the stored poses. The part I had not understood is the aLoopIsClosed flag used in its if-condition: it marks that a loop-closure (or GPS) factor was added to the graph, so all historical keyframe poses need to be rewritten from the optimized values.

7. publishOdometry(): publishes the lidar odometry.

8. publishFrames(): publishes the frames. For these last two it is worth stepping into the functions to see exactly what gets published; they are not covered further here.
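
For item 4, here is a small standalone sketch (my reading of cornerOptimization further below, written with Eigen for brevity; the helper itself is not in the file) of the point-to-line distance the residuals are built from: the cross product of the two triangle edges gives a012 (twice the triangle area), l12 is the baseline length, and the distance ld2 is their ratio.

  #include <Eigen/Dense>

  // p0: corner point of the current frame (map frame); p1, p2: two points on the fitted line
  float pointToLineDistance(const Eigen::Vector3f& p0,
                            const Eigen::Vector3f& p1,
                            const Eigen::Vector3f& p2)
  {
      float a012 = ((p0 - p1).cross(p0 - p2)).norm();  // parallelogram area spanned by the two edges
      float l12  = (p1 - p2).norm();                   // length of the baseline p1-p2
      return a012 / l12;                               // height of the triangle = point-to-line distance
  }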

  1. void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)
  2. {
  3. std::lock_guard<std::mutex> lock(mtx);
  4. gpsQueue.push_back(*gpsMsg);
  5. }
  6. void pointAssociateToMap(PointType const * const pi, PointType * const po)
  7. {
  8. po->x = transPointAssociateToMap(0,0) * pi->x
  9. +transPointAssociateToMap(0,1) * pi->y
  10. +transPointAssociateToMap(0,2) * pi->z
  11. +transPointAssociateToMap(0,3);
  12. po->y = transPointAssociateToMap(1,0) * pi->x
  13. +transPointAssociateToMap(1,1) * pi->y
  14. +transPointAssociateToMap(1,2) * pi->z
  15. +transPointAssociateToMap(1,3);
  16. po->z = transPointAssociateToMap(2,0) * pi->x
  17. +transPointAssociateToMap(2,1) * pi->y
  18. +transPointAssociateToMap(2,2) * pi->z
  19. +transPointAssociateToMap(2,3);
  20. po->intensity = pi->intensity;
  21. }
  22. pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose* transformIn)
  23. {
  24. pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
  25. PointType *pointFrom;
  26. int cloudSize = cloudIn->size();
  27. cloudOut->resize(cloudSize);
  28. Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z, transformIn->roll, transformIn->pitch, transformIn->yaw);
  29. for (int i = 0; i < cloudSize; ++i)
  30. {
  31. pointFrom = &cloudIn->points[i];
  32. cloudOut->points[i].x = transCur(0,0) * pointFrom->x
  33. +transCur(0,1) * pointFrom->y
  34. +transCur(0,2) * pointFrom->z
  35. +transCur(0,3);
  36. cloudOut->points[i].y = transCur(1,0) * pointFrom->x
  37. +transCur(1,1) * pointFrom->y
  38. +transCur(1,2) * pointFrom->z
  39. +transCur(1,3);
  40. cloudOut->points[i].z = transCur(2,0) * pointFrom->x
  41. +transCur(2,1) * pointFrom->y
  42. +transCur(2,2) * pointFrom->z
  43. +transCur(2,3);
  44. cloudOut->points[i].intensity = pointFrom->intensity;
  45. }
  46. return cloudOut;
  47. }
  48. gtsam::Pose3 affine3fTogtsamPose3(const Eigen::Affine3f& thisPose)
  49. {
  50. float x, y, z, roll, pitch, yaw;
  51. pcl::getTranslationAndEulerAngles(thisPose, x, y, z, roll, pitch, yaw);
  52. return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(roll), double(pitch), double(yaw)),
  53. gtsam::Point3(double(x), double(y), double(z)));
  54. }
  55. gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)
  56. {
  57. return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),
  58. gtsam::Point3(double(thisPoint.x), double(thisPoint.y), double(thisPoint.z)));
  59. }
  60. gtsam::Pose3 trans2gtsamPose(float transformIn[])
  61. {
  62. return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]),
  63. gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));
  64. }
  65. Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)
  66. {
  67. return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch, thisPoint.yaw);
  68. }
  69. Eigen::Affine3f trans2Affine3f(float transformIn[])
  70. {
  71. return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1], transformIn[2]);
  72. }
  73. PointTypePose trans2PointTypePose(float transformIn[])
  74. {
  75. PointTypePose thisPose6D;
  76. thisPose6D.x = transformIn[3];
  77. thisPose6D.y = transformIn[4];
  78. thisPose6D.z = transformIn[5];
  79. thisPose6D.roll = transformIn[0];
  80. thisPose6D.pitch = transformIn[1];
  81. thisPose6D.yaw = transformIn[2];
  82. return thisPose6D;
  83. }

A set of conversion helpers. It is worth checking exactly which field gets assigned to which; I have not gone through all of them yet and will pay closer attention on a second pass. The one detail worth noting already is that transformTobeMapped stores [roll, pitch, yaw, x, y, z], which explains the index order in trans2Affine3f and trans2PointTypePose.
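
One small sketch (not part of the file) that makes the ordering concrete: pcl::getTransformation takes (x, y, z, roll, pitch, yaw), while transformTobeMapped stores [roll, pitch, yaw, x, y, z], hence the index shuffling above.

  float t[6] = {0.1f, 0.0f, 0.2f, 1.0f, 2.0f, 3.0f};   // [roll, pitch, yaw, x, y, z], same layout as transformTobeMapped

  // note the reordering: translation arguments first, rotation second
  Eigen::Affine3f T = pcl::getTransformation(t[3], t[4], t[5], t[0], t[1], t[2]);

  float x, y, z, roll, pitch, yaw;
  pcl::getTranslationAndEulerAngles(T, x, y, z, roll, pitch, yaw);
  // recovers x = 1.0, y = 2.0, z = 3.0, roll = 0.1, pitch = 0.0, yaw = 0.2 (up to floating point error)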

  1. void visualizeGlobalMapThread()
  2. {
  3. ros::Rate rate(0.2); // loop rate: 0.2 Hz, i.e. once every 5 seconds
  4. while (ros::ok())
  5. {
  6. rate.sleep();
  7. publishGlobalMap();
  8. }
  9. if (savePCD == false)
  10. return;
  11. cout << "****************************************************" << endl;
  12. cout << "Saving map to pcd files ..." << endl;
  13. // create directory and remove old files;
  14. savePCDDirectory = std::getenv("HOME") + savePCDDirectory;
  15. int unused = system((std::string("exec rm -r ") + savePCDDirectory).c_str());
  16. unused = system((std::string("mkdir ") + savePCDDirectory).c_str()); ++unused;
  17. // save key frame transformations
  18. pcl::io::savePCDFileASCII(savePCDDirectory + "trajectory.pcd", *cloudKeyPoses3D);
  19. pcl::io::savePCDFileBinary(savePCDDirectory + "transformations.pcd", *cloudKeyPoses6D);
  20. // extract global point cloud map
  21. pcl::PointCloud<PointType>::Ptr globalCornerCloud(new pcl::PointCloud<PointType>());
  22. pcl::PointCloud<PointType>::Ptr globalSurfCloud(new pcl::PointCloud<PointType>());
  23. pcl::PointCloud<PointType>::Ptr globalMapCloud(new pcl::PointCloud<PointType>());
  24. for (int i = 0; i < (int)cloudKeyPoses3D->size(); i++)
  25. {
  26. // clip cloud
  27. pcl::PointCloud<PointType>::Ptr cornerTemp(new pcl::PointCloud<PointType>());
  28. pcl::PointCloud<PointType>::Ptr cornerTemp2(new pcl::PointCloud<PointType>());
  29. *cornerTemp = *transformPointCloud(cornerCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
  30. for (int j = 0; j < (int)cornerTemp->size(); ++j)
  31. {
  32. if (cornerTemp->points[j].z > cloudKeyPoses6D->points[i].z && cornerTemp->points[j].z < cloudKeyPoses6D->points[i].z + 5)
  33. cornerTemp2->push_back(cornerTemp->points[j]);
  34. }
  35. pcl::PointCloud<PointType>::Ptr surfTemp(new pcl::PointCloud<PointType>());
  36. pcl::PointCloud<PointType>::Ptr surfTemp2(new pcl::PointCloud<PointType>());
  37. *surfTemp = *transformPointCloud(surfCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
  38. for (int j = 0; j < (int)surfTemp->size(); ++j)
  39. {
  40. if (surfTemp->points[j].z > cloudKeyPoses6D->points[i].z && surfTemp->points[j].z < cloudKeyPoses6D->points[i].z + 5)
  41. surfTemp2->push_back(surfTemp->points[j]);
  42. }
  43. *globalCornerCloud += *cornerTemp2;
  44. *globalSurfCloud += *surfTemp2;
  45. // origin cloud
  46. *globalCornerCloud += *transformPointCloud(cornerCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
  47. *globalSurfCloud += *transformPointCloud(surfCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
  48. cout << "\r" << std::flush << "Processing feature cloud " << i << " of " << cloudKeyPoses6D->size() << " ...";
  49. }
  50. // down-sample and save corner cloud
  51. downSizeFilterCorner.setInputCloud(globalCornerCloud);
  52. pcl::io::savePCDFileASCII(savePCDDirectory + "cloudCorner.pcd", *globalCornerCloud);
  53. // down-sample and save surf cloud
  54. downSizeFilterSurf.setInputCloud(globalSurfCloud);
  55. pcl::io::savePCDFileASCII(savePCDDirectory + "cloudSurf.pcd", *globalSurfCloud);
  56. // down-sample and save global point cloud map
  57. *globalMapCloud += *globalCornerCloud;
  58. *globalMapCloud += *globalSurfCloud;
  59. downSizeFilterSurf.setInputCloud(globalMapCloud);
  60. pcl::io::savePCDFileASCII(savePCDDirectory + "cloudGlobal.pcd", *globalMapCloud);
  61. cout << "****************************************************" << endl;
  62. cout << "Saving map to pcd files completed" << endl;
  63. }
  64. void publishGlobalMap()
  65. {
  66. if (pubLaserCloudSurround.getNumSubscribers() == 0)
  67. return;
  68. if (cloudKeyPoses3D->points.empty() == true)
  69. return;
  70. pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());;
  71. pcl::PointCloud<PointType> ::Ptr globalMapKeyPoses(new pcl::PointCloud<PointType>());
  72. pcl::PointCloud<PointType> ::Ptr globalMapKeyPosesDS(new pcl::PointCloud<PointType>());
  73. pcl::PointCloud<PointType> ::Ptr globalMapKeyFrames(new pcl::PointCloud<PointType>());
  74. pcl::PointCloud<PointType> ::Ptr globalMapKeyFramesDS(new pcl::PointCloud<PointType>());
  75. // kd-tree to find near key frames to visualize
  76. std::vector<int> pointSearchIndGlobalMap;
  77. std::vector<float> pointSearchSqDisGlobalMap;
  78. // search near key frames to visualize
  79. mtx.lock();
  80. kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);
  81. kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->back(), globalMapVisualizationSearchRadius, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);
  82. mtx.unlock();
  83. for (int i = 0; i < (int)pointSearchIndGlobalMap.size(); ++i)
  84. globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);
  85. // downsample near selected key frames
  86. pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses; // for global map visualization
  87. downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity); // for global map visualization
  88. downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
  89. downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
  90. // extract visualized and downsampled key frames
  91. for (int i = 0; i < (int)globalMapKeyPosesDS->size(); ++i){
  92. if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) > globalMapVisualizationSearchRadius)
  93. continue;
  94. int thisKeyInd = (int)globalMapKeyPosesDS->points[i].intensity;
  95. *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
  96. *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
  97. }
  98. // downsample visualized points
  99. pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames; // for global map visualization
  100. downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize, globalMapVisualizationLeafSize); // for global map visualization
  101. downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
  102. downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
  103. publishCloud(&pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, "odom");
  104. }
  105. void loopHandler(const std_msgs::Float64MultiArray::ConstPtr& loopMsg)
  106. {
  107. // control loop closure frequency
  108. static double last_loop_closure_time = -1;
  109. {
  110. std::lock_guard<std::mutex> lock(mtx);
  111. if (timeLaserInfoCur - last_loop_closure_time < 5.0)
  112. return;
  113. else
  114. last_loop_closure_time = timeLaserInfoCur;
  115. }
  116. performLoopClosure(*loopMsg);
  117. }
  118. void performLoopClosure(const std_msgs::Float64MultiArray& loopMsg)
  119. {
  120. pcl::PointCloud<PointTypePose>::Ptr copy_cloudKeyPoses6D(new pcl::PointCloud<PointTypePose>());
  121. {
  122. std::lock_guard<std::mutex> lock(mtx);
  123. *copy_cloudKeyPoses6D = *cloudKeyPoses6D;
  124. }
  125. // get lidar keyframe id
  126. int key_cur = -1; // latest lidar keyframe id
  127. int key_pre = -1; // previous lidar keyframe id
  128. {
  129. loopFindKey(loopMsg, copy_cloudKeyPoses6D, key_cur, key_pre);
  130. if (key_cur == -1 || key_pre == -1 || key_cur == key_pre) // || abs(key_cur - key_pre) < 25)
  131. return;
  132. }
  133. // check if loop added before
  134. {
  135. // if image loop closure comes at high frequency, many image loop may point to the same key_cur
  136. auto it = loopIndexContainer.find(key_cur);
  137. if (it != loopIndexContainer.end())
  138. return;
  139. }
  140. // get lidar keyframe cloud
  141. pcl::PointCloud<PointType>::Ptr cureKeyframeCloud(new pcl::PointCloud<PointType>());
  142. pcl::PointCloud<PointType>::Ptr prevKeyframeCloud(new pcl::PointCloud<PointType>());
  143. {
  144. loopFindNearKeyframes(copy_cloudKeyPoses6D, cureKeyframeCloud, key_cur, 0);
  145. loopFindNearKeyframes(copy_cloudKeyPoses6D, prevKeyframeCloud, key_pre, historyKeyframeSearchNum);
  146. if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
  147. return;
  148. if (pubHistoryKeyFrames.getNumSubscribers() != 0)
  149. publishCloud(&pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, "odom");
  150. }
  151. // get keyframe pose
  152. Eigen::Affine3f pose_cur;
  153. Eigen::Affine3f pose_pre;
  154. Eigen::Affine3f pose_diff_t; // serves as initial guess
  155. {
  156. pose_cur = pclPointToAffine3f(copy_cloudKeyPoses6D->points[key_cur]);
  157. pose_pre = pclPointToAffine3f(copy_cloudKeyPoses6D->points[key_pre]);
  158. Eigen::Vector3f t_diff;
  159. t_diff.x() = - (pose_cur.translation().x() - pose_pre.translation().x());
  160. t_diff.y() = - (pose_cur.translation().y() - pose_pre.translation().y());
  161. t_diff.z() = - (pose_cur.translation().z() - pose_pre.translation().z());
  162. if (t_diff.norm() < historyKeyframeSearchRadius)
  163. t_diff.setZero();
  164. pose_diff_t = pcl::getTransformation(t_diff.x(), t_diff.y(), t_diff.z(), 0, 0, 0);
  165. }
  166. // transform and rotate cloud for matching
  167. pcl::IterativeClosestPoint<PointType, PointType> icp;
  168. // pcl::GeneralizedIterativeClosestPoint<PointType, PointType> icp;
  169. icp.setMaxCorrespondenceDistance(historyKeyframeSearchRadius * 2);
  170. icp.setMaximumIterations(100);
  171. icp.setRANSACIterations(0);
  172. icp.setTransformationEpsilon(1e-3);
  173. icp.setEuclideanFitnessEpsilon(1e-3);
  174. // initial guess cloud
  175. pcl::PointCloud<PointType>::Ptr cureKeyframeCloud_new(new pcl::PointCloud<PointType>());
  176. pcl::transformPointCloud(*cureKeyframeCloud, *cureKeyframeCloud_new, pose_diff_t);
  177. // match using icp
  178. icp.setInputSource(cureKeyframeCloud_new);
  179. icp.setInputTarget(prevKeyframeCloud);
  180. pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
  181. icp.align(*unused_result);
  182. if (pubIcpKeyFrames.getNumSubscribers() != 0)
  183. {
  184. pcl::PointCloud<PointType>::Ptr closed_cloud(new pcl::PointCloud<PointType>());
  185. pcl::transformPointCloud(*cureKeyframeCloud_new, *closed_cloud, icp.getFinalTransformation());
  186. publishCloud(&pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, "odom");
  187. }
  188. // add graph factor
  189. if (icp.getFitnessScore() < historyKeyframeFitnessScore && icp.hasConverged() == true)
  190. {
  191. // get gtsam pose
  192. gtsam::Pose3 poseFrom = affine3fTogtsamPose3(Eigen::Affine3f(icp.getFinalTransformation()) * pose_diff_t * pose_cur);
  193. gtsam::Pose3 poseTo = pclPointTogtsamPose3(copy_cloudKeyPoses6D->points[key_pre]);
  194. // get noise
  195. float noise = icp.getFitnessScore();
  196. gtsam::Vector Vector6(6);
  197. Vector6 << noise, noise, noise, noise, noise, noise;
  198. noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);
  199. // save pose constraint
  200. mtx.lock();
  201. loopIndexQueue.push_back(make_pair(key_cur, key_pre));
  202. loopPoseQueue.push_back(poseFrom.between(poseTo));
  203. loopNoiseQueue.push_back(constraintNoise);
  204. mtx.unlock();
  205. // add loop pair to container
  206. loopIndexContainer[key_cur] = key_pre;
  207. }
  208. // visualize loop constraints
  209. if (!loopIndexContainer.empty())
  210. {
  211. visualization_msgs::MarkerArray markerArray;
  212. // loop nodes
  213. visualization_msgs::Marker markerNode;
  214. markerNode.header.frame_id = "odom";
  215. markerNode.header.stamp = timeLaserInfoStamp;
  216. markerNode.action = visualization_msgs::Marker::ADD;
  217. markerNode.type = visualization_msgs::Marker::SPHERE_LIST;
  218. markerNode.ns = "loop_nodes";
  219. markerNode.id = 0;
  220. markerNode.pose.orientation.w = 1;
  221. markerNode.scale.x = 0.3; markerNode.scale.y = 0.3; markerNode.scale.z = 0.3;
  222. markerNode.color.r = 0; markerNode.color.g = 0.8; markerNode.color.b = 1;
  223. markerNode.color.a = 1;
  224. // loop edges
  225. visualization_msgs::Marker markerEdge;
  226. markerEdge.header.frame_id = "odom";
  227. markerEdge.header.stamp = timeLaserInfoStamp;
  228. markerEdge.action = visualization_msgs::Marker::ADD;
  229. markerEdge.type = visualization_msgs::Marker::LINE_LIST;
  230. markerEdge.ns = "loop_edges";
  231. markerEdge.id = 1;
  232. markerEdge.pose.orientation.w = 1;
  233. markerEdge.scale.x = 0.1;
  234. markerEdge.color.r = 0.9; markerEdge.color.g = 0.9; markerEdge.color.b = 0;
  235. markerEdge.color.a = 1;
  236. for (auto it = loopIndexContainer.begin(); it != loopIndexContainer.end(); ++it)
  237. {
  238. int key_cur = it->first;
  239. int key_pre = it->second;
  240. geometry_msgs::Point p;
  241. p.x = copy_cloudKeyPoses6D->points[key_cur].x;
  242. p.y = copy_cloudKeyPoses6D->points[key_cur].y;
  243. p.z = copy_cloudKeyPoses6D->points[key_cur].z;
  244. markerNode.points.push_back(p);
  245. markerEdge.points.push_back(p);
  246. p.x = copy_cloudKeyPoses6D->points[key_pre].x;
  247. p.y = copy_cloudKeyPoses6D->points[key_pre].y;
  248. p.z = copy_cloudKeyPoses6D->points[key_pre].z;
  249. markerNode.points.push_back(p);
  250. markerEdge.points.push_back(p);
  251. }
  252. markerArray.markers.push_back(markerNode);
  253. markerArray.markers.push_back(markerEdge);
  254. pubLoopConstraintEdge.publish(markerArray);
  255. }
  256. }
  257. void loopFindNearKeyframes(const pcl::PointCloud<PointTypePose>::Ptr& copy_cloudKeyPoses6D,
  258. pcl::PointCloud<PointType>::Ptr& nearKeyframes,
  259. const int& key, const int& searchNum)
  260. {
  261. // extract near keyframes
  262. nearKeyframes->clear();
  263. int cloudSize = copy_cloudKeyPoses6D->size();
  264. for (int i = -searchNum; i <= searchNum; ++i)
  265. {
  266. int key_near = key + i;
  267. if (key_near < 0 || key_near >= cloudSize )
  268. continue;
  269. *nearKeyframes += *transformPointCloud(cornerCloudKeyFrames[key_near], &copy_cloudKeyPoses6D->points[key_near]);
  270. *nearKeyframes += *transformPointCloud(surfCloudKeyFrames[key_near], &copy_cloudKeyPoses6D->points[key_near]);
  271. }
  272. if (nearKeyframes->empty())
  273. return;
  274. // downsample near keyframes
  275. pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());
  276. downSizeFilterICP.setInputCloud(nearKeyframes);
  277. downSizeFilterICP.filter(*cloud_temp);
  278. *nearKeyframes = *cloud_temp;
  279. }
  280. void loopFindKey(const std_msgs::Float64MultiArray& loopMsg,
  281. const pcl::PointCloud<PointTypePose>::Ptr& copy_cloudKeyPoses6D,
  282. int& key_cur, int& key_pre)
  283. {
  284. if (loopMsg.data.size() != 2)
  285. return;
  286. double loop_time_cur = loopMsg.data[0];
  287. double loop_time_pre = loopMsg.data[1];
  288. if (abs(loop_time_cur - loop_time_pre) < historyKeyframeSearchTimeDiff)
  289. return;
  290. int cloudSize = copy_cloudKeyPoses6D->size();
  291. if (cloudSize < 2)
  292. return;
  293. // latest key
  294. key_cur = cloudSize - 1;
  295. for (int i = cloudSize - 1; i >= 0; --i)
  296. {
  297. if (copy_cloudKeyPoses6D->points[i].time > loop_time_cur)
  298. key_cur = round(copy_cloudKeyPoses6D->points[i].intensity);
  299. else
  300. break;
  301. }
  302. // previous key
  303. key_pre = 0;
  304. for (int i = 0; i < cloudSize; ++i)
  305. {
  306. if (copy_cloudKeyPoses6D->points[i].time < loop_time_pre)
  307. key_pre = round(copy_cloudKeyPoses6D->points[i].intensity);
  308. else
  309. break;
  310. }
  311. }
  312. void loopClosureThread()
  313. {
  314. if (loopClosureEnableFlag == false)
  315. return;
  316. ros::Rate rate(0.5);
  317. while (ros::ok())
  318. {
  319. rate.sleep();
  320. performLoopClosureDetection();
  321. }
  322. }
  323. void performLoopClosureDetection()
  324. {
  325. std::vector<int> pointSearchIndLoop;
  326. std::vector<float> pointSearchSqDisLoop;
  327. int key_cur = -1;
  328. int key_pre = -1;
  329. double loop_time_cur = -1;
  330. double loop_time_pre = -1;
  331. // find latest key and time ("latest" simply means the most recent keyframe, i.e. the current one)
  332. {
  333. std::lock_guard<std::mutex> lock(mtx); // take the lock first
  334. if (cloudKeyPoses3D->empty())
  335. return;
  336. kdtreeHistoryKeyPoses->setInputCloud(cloudKeyPoses3D);
  337. kdtreeHistoryKeyPoses->radiusSearch(cloudKeyPoses3D->back(), historyKeyframeSearchRadius, pointSearchIndLoop, pointSearchSqDisLoop, 0);
  338. key_cur = cloudKeyPoses3D->size() - 1;
  339. loop_time_cur = cloudKeyPoses6D->points[key_cur].time;
  340. }
  341. // find previous key and time, looping from the oldest index upwards
  342. {
  343. for (int i = 0; i < (int)pointSearchIndLoop.size(); ++i)
  344. {
  345. int id = pointSearchIndLoop[i];
  346. if (abs(cloudKeyPoses6D->points[id].time - loop_time_cur) > historyKeyframeSearchTimeDiff)
  347. {
  348. key_pre = id;
  349. loop_time_pre = cloudKeyPoses6D->points[key_pre].time;
  350. break;
  351. }
  352. }
  353. }
  354. // the if-condition below catches all the invalid cases
  355. if (key_cur == -1 || key_pre == -1 || key_pre == key_cur || loop_time_cur < 0 || loop_time_pre < 0)
  356. return;
  357. std_msgs::Float64MultiArray match_msg;
  358. match_msg.data.push_back(loop_time_cur);
  359. match_msg.data.push_back(loop_time_pre);
  360. performLoopClosure(match_msg);
  361. }

The functions above are the bodies of the extra threads that appear in main() at the bottom of the file: compared with the other nodes, this one starts two additional threads, one for loop closure and one for global-map visualization.
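
For reference, the main() pattern being referred to looks roughly like the sketch below (reconstructed from how the callbacks and the two thread functions are used here, not copied verbatim from the file): besides ros::spin(), two std::threads run the loop-closure loop and the global-map visualization loop.

  int main(int argc, char** argv)
  {
      ros::init(argc, argv, "lidar_mapping");
      mapOptimization MO;

      // the two extra threads mentioned above
      std::thread loopThread(&mapOptimization::loopClosureThread, &MO);
      std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);

      ros::spin();   // callbacks (laserCloudInfoHandler, gpsHandler, loopHandler) run here

      loopThread.join();
      visualizeMapThread.join();
      return 0;
  }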

  1. void updateInitialGuess()
  2. {
  3. // pose of the previous frame (the lidar pose)
  4. // Affine3f is an affine transform (rotation + translation, and more generally scaling) packed into one matrix
  5. static Eigen::Affine3f lastImuTransformation;
  6. // system initialization
  7. // if the keyframe set is still empty, keep initializing
  8. if (cloudKeyPoses3D->points.empty())
  9. {
  10. // transformTobeMapped is an array of 6; the rotation part of the current pose is initialized from the RPY in the cloud info (raw IMU data)
  11. // its first three entries hold the initial roll, pitch and yaw
  12. transformTobeMapped[0] = cloudInfo.imuRollInit;
  13. transformTobeMapped[1] = cloudInfo.imuPitchInit;
  14. transformTobeMapped[2] = cloudInfo.imuYawInit;
  15. // defined in utility.h
  16. if (!useImuHeadingInitialization)
  17. transformTobeMapped[2] = 0;
  18. lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
  19. return;
  20. }
  21. // use VINS odometry estimation for pose guess
  22. // compute the relative transform between the odometry poses of the previous and current frames, apply it to the previous frame's pose, and store the result in transformTobeMapped
  23. static int odomResetId = 0;
  24. static bool lastVinsTransAvailable = false;
  25. static Eigen::Affine3f lastVinsTransformation;
  26. if (cloudInfo.odomAvailable == true && cloudInfo.odomResetId == odomResetId)
  27. {
  28. ROS_INFO("Using VINS initial guess");
  29. if (lastVinsTransAvailable == false)
  30. {
  31. ROS_INFO("Initializing VINS initial guess");
  32. // on the first pass, set lastVinsTransformation from the odometry xyz + RPY
  33. lastVinsTransformation = pcl::getTransformation(cloudInfo.odomX, cloudInfo.odomY, cloudInfo.odomZ,
  34. cloudInfo.odomRoll, cloudInfo.odomPitch, cloudInfo.odomYaw);
  35. lastVinsTransAvailable = true;
  36. }
  37. else
  38. {
  39. ROS_INFO("Obtaining VINS incremental guess");
  40. // convert the odometry into a transform, so the current pose can be expressed relative to lastVinsTransformation
  41. // initial pose estimate of the current frame (from the VINS odometry), used next to compute the incremental transform
  42. Eigen::Affine3f transBack = pcl::getTransformation(cloudInfo.odomX, cloudInfo.odomY, cloudInfo.odomZ,
  43. cloudInfo.odomRoll, cloudInfo.odomPitch, cloudInfo.odomYaw);
  44. // inverse() gives the inverse transform
  45. // relative transform of the current frame w.r.t. the previous one, computed from the odometry
  46. Eigen::Affine3f transIncre = lastVinsTransformation.inverse() * transBack;
  47. // pose of the previous frame
  48. Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
  49. // pose of the current frame
  50. Eigen::Affine3f transFinal = transTobe * transIncre;
  51. // update the current frame pose transformTobeMapped
  52. // indices 0 1 2 hold RPY and 3 4 5 hold xyz, so the argument order is swapped here, a small but important detail
  53. pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],
  54. transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
  55. // store the current odometry pose as the new lastVinsTransformation
  56. lastVinsTransformation = pcl::getTransformation(cloudInfo.odomX, cloudInfo.odomY, cloudInfo.odomZ,
  57. cloudInfo.odomRoll, cloudInfo.odomPitch, cloudInfo.odomYaw);
  58. // 保存当前时态的imu位姿
  59. lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
  60. return;
  61. // transformTobeMapped存储激光雷达位姿
  62. }
  63. }
  64. else
  65. {
  66. ROS_WARN("VINS failure detected.");
  67. // no odometry available, or the first time this branch is entered
  68. lastVinsTransAvailable = false;
  69. odomResetId = cloudInfo.odomResetId;
  70. }
  71. // reached when the odometry is unavailable or lastVinsTransAvailable == false
  72. // use imu incremental estimation for pose guess (only rotation)
  73. // only reached when the VINS branch above did not return; falls back to the accumulated IMU estimate (rotation only)
  74. if (cloudInfo.imuAvailable == true)
  75. {
  76. ROS_INFO("Using IMU initial guess");
  77. // convert the IMU RPY into a transform, so the current attitude can be expressed relative to lastImuTransformation
  78. // attitude of the current frame (from the raw IMU data)
  79. Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);
  80. // attitude change of the current frame relative to the previous one
  81. Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;
  82. // multiply the previous pose by this relative IMU transform and write the result into transformTobeMapped
  83. // pose of the previous frame
  84. Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
  85. Eigen::Affine3f transFinal = transTobe * transIncre;
  86. pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],
  87. transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
  88. lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
  89. return;
  90. }
  91. }

The pose prediction step: the very first frame takes its orientation straight from the IMU, and every later frame chains the previous mapped pose with the odometry increment (from VINS when available, otherwise from the IMU rotation only).
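
The update can be summarized in a couple of lines (a sketch of the same math as above, with hypothetical argument names): the odometry supplies the relative motion between the two frames, which is applied on top of the previous mapped pose.

  // prevMappedPose: transformTobeMapped of the previous frame, as an Affine3f
  // odomPrev / odomCurr: odometry poses at the previous and current frames
  Eigen::Affine3f initialGuess(const Eigen::Affine3f& prevMappedPose,
                               const Eigen::Affine3f& odomPrev,
                               const Eigen::Affine3f& odomCurr)
  {
      Eigen::Affine3f increment = odomPrev.inverse() * odomCurr;  // transIncre in the code
      return prevMappedPose * increment;                          // transFinal in the code
  }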

  1. void extractNearby()
  2. {
  3. pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>());
  4. pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>());
  5. std::vector<int> pointSearchInd;
  6. std::vector<float> pointSearchSqDis;
  7. // extract all the nearby key poses and downsample them
  8. // kd-tree input: the global set of keyframe poses (all historical keyframes)
  9. // the kd-tree is just the search structure
  10. kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D); // create kd-tree
  11. // surroundingKeyframeSearchRadius defaults to 50.0, the search range of the scan-to-map optimization
  12. // search, around the most recent keyframe, for all keyframes within this radius
  13. // the results are stored in pointSearchInd and pointSearchSqDis
  14. // back() is simply the last point of the cloud
  15. // the first argument is the query point; the search returns every point in the kd-tree closer to it than the given radius
  16. kdtreeSurroundingKeyPoses->radiusSearch(cloudKeyPoses3D->back(), (double)surroundingKeyframeSearchRadius, pointSearchInd, pointSearchSqDis);
  17. for (int i = 0; i < (int)pointSearchInd.size(); ++i)
  18. {
  19. // for every point found, use its index to look up the corresponding keyframe pose in cloudKeyPoses3D
  20. int id = pointSearchInd[i];
  21. // and add it to the set of neighboring key poses
  22. surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);
  23. }
  24. // downsample
  25. downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);
  26. // the downsampled result goes into the DS cloud
  27. downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);
  28. // also extract some latest key frames in case the robot rotates in one position
  29. // i.e. add keyframes that are close in time; when the platform spins in place it is reasonable to include them
  30. // Question: why doesn't spinning in place already count as spatially adjacent keyframes?
  31. /*
  32. Tentative answer (3.7):
  33. the 3D pose cloud only stores position (xyz), while the 6D cloud stores position + rotation (xyz + rpy).
  34. When spinning in place the keyframes' xyz barely changes, so consecutive frames may collapse to almost a single coordinate in the 3D cloud (whether that is exactly what happens depends on how cloudKeyPoses3D is filled).
  35. A purely spatial search on the 3D cloud can therefore miss the frames before and after the rotation (same xyz, different rpy),
  36. so the temporally adjacent frames are added explicitly, which also explains why the if-statement below uses the 6D cloud's time stamps
  37. */
  38. int numPoses = cloudKeyPoses3D->size();
  39. for (int i = numPoses-1; i >= 0; --i)
  40. {
  41. if (timeLaserInfoCur - cloudKeyPoses6D->points[i].time < 10.0)
  42. surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);
  43. else
  44. break;
  45. }
  46. // add the corner and surface points of the neighboring keyframes to the local map used for scan-to-map matching
  47. extractCloud(surroundingKeyPosesDS);
  48. }
  49. // the corner and surface points of the neighboring keyframes are accumulated into a map,
  50. // called the local map; the scan-to-map matching below matches against exactly this cloud
  51. void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)
  52. {
  53. std::vector<pcl::PointCloud<PointType>> laserCloudCornerSurroundingVec;
  54. std::vector<pcl::PointCloud<PointType>> laserCloudSurfSurroundingVec;
  55. laserCloudCornerSurroundingVec.resize(cloudToExtract->size()); // 这个是重新设置大小的
  56. laserCloudSurfSurroundingVec.resize(cloudToExtract->size());
  57. // extract surrounding map
  58. // iterate over the keyframes adjacent (in space and time) to the current frame (in practice, to the most recent keyframe)
  59. #pragma omp parallel for num_threads(numberOfCores) // parallelized loop
  60. // #pragma omp parallel for num_threads(numberOfCores)
  61. for (int i = 0; i < (int)cloudToExtract->size(); ++i)
  62. {
  63. // index of the neighboring keyframe
  64. // intensity doubles as storage for this index
  65. int thisKeyInd = (int)cloudToExtract->points[i].intensity;
  66. // keep only keyframes within the search radius; transformPointCloud returns the input cloud multiplied by the given transform
  67. // pointDistance is a small helper that computes the distance between two points
  68. if (pointDistance(cloudKeyPoses3D->points[thisKeyInd], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)
  69. continue;
  70. // transform the keyframe's corner and surface clouds into the world frame using its 6D pose
  71. laserCloudCornerSurroundingVec[i] = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
  72. laserCloudSurfSurroundingVec[i] = *transformPointCloud(surfCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
  73. }
  74. // fuse the map
  75. // accumulate the corner and surface features, then downsample
  76. laserCloudCornerFromMap->clear();
  77. laserCloudSurfFromMap->clear();
  78. for (int i = 0; i < (int)cloudToExtract->size(); ++i)
  79. {
  80. // 加入局部map
  81. *laserCloudCornerFromMap += laserCloudCornerSurroundingVec[i];
  82. *laserCloudSurfFromMap += laserCloudSurfSurroundingVec[i];
  83. }
  84. // Downsample the surrounding corner key frames (or map); the DS suffix means downsampled
  85. downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
  86. downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
  87. // Downsample the surrounding surf key frames (or map)
  88. downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
  89. downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
  90. // downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
  91. // downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
  92. }
  93. void extractSurroundingKeyFrames()
  94. {
  95. // if there are no keyframes yet, return immediately
  96. if (cloudKeyPoses3D->points.empty() == true)
  97. return;
  98. extractNearby();
  99. }

The logic here is simple: extractSurroundingKeyFrames -> extractNearby -> extractCloud. The details are in the functions themselves (see the comments above), so no more ink is spent on them here.
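
A minimal sketch of the radiusSearch call that extractNearby is built around (the 50 m value is the default mentioned in the comments):

  pcl::KdTreeFLANN<PointType> kdtree;
  kdtree.setInputCloud(cloudKeyPoses3D);            // all historical key poses

  std::vector<int> indices;
  std::vector<float> squaredDistances;
  float radius = 50.0f;                             // surroundingKeyframeSearchRadius
  kdtree.radiusSearch(cloudKeyPoses3D->back(), radius, indices, squaredDistances);
  // indices[i] identifies a matching key pose in cloudKeyPoses3D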

  1. // downsample the corner and surface point sets of the current lidar frame
  2. void downsampleCurrentScan()
  3. {
  4. // Downsample cloud from current scan
  5. // further downsample the corner and surface sets obtained in the previous step
  6. // experiment (3.7): exactly these two filter blocks are needed; adding or removing one made the trajectory xyz blow up in the plots
  7. laserCloudCornerLastDS->clear();
  8. downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
  9. downSizeFilterCorner.filter(*laserCloudCornerLastDS);
  10. laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();
  11. laserCloudSurfLastDS->clear();
  12. downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
  13. downSizeFilterSurf.filter(*laserCloudSurfLastDS);
  14. laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
  15. // laserCloudSurfLastDS->clear();
  16. // downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
  17. // downSizeFilterSurf.filter(*laserCloudSurfLastDS);
  18. // laserCloudSurfLastDSNum = laserCloudCornerLastDS->size();
  19. }

The downsampling step. I initially wanted to add a third, duplicated filtering pass, but it did not build, so I do not know how it would have behaved; since the author uses exactly two, there is presumably a reason for exactly two.

  1. void updatePointAssociateToMap()
  2. {
  3. transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
  4. }
  5. void cornerOptimization()
  6. {
  7. // 仿射变换,更新当前位姿与地图间位姿变换
  8. updatePointAssociateToMap();
  9. #pragma omp parallel for num_threads(numberOfCores) // 并行计算
  10. // 遍历点云,构建点到直线的约束
  11. for (int i = 0; i < laserCloudCornerLastDSNum; i++)
  12. {
  13. PointType pointOri, pointSel, coeff;
  14. std::vector<int> pointSearchInd;
  15. std::vector<float> pointSearchSqDis;
  16. // 角点(lidar系的)
  17. pointOri = laserCloudCornerLastDS->points[i];
  18. // 将点从lidar系转换到map坐标系,一个变换的操作,矩阵乘法
  19. pointAssociateToMap(&pointOri, &pointSel);
  20. // 在局部角点map中查找当前角点相邻的5个角点 这里的5可以做个对比实验
  21. kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
  22. // cv库中来存储矩阵的
  23. cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));
  24. cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));
  25. cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));
  26. // only proceed if all 5 neighbors are within the threshold (1 m): pointSearchSqDis is sorted ascending, so index [4] is the farthest of the five
  27. if (pointSearchSqDis[4] < 1.0)
  28. {
  29. float cx = 0, cy = 0, cz = 0;
  30. for (int j = 0; j < 5; j++)
  31. {
  32. // 计算5个点的均值坐标,记为中心点
  33. cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
  34. cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
  35. cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
  36. }
  37. cx /= 5; cy /= 5; cz /= 5;
  38. // 根据均值计算协方差(得到一个协方差矩阵 3*3的)
  39. float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
  40. for (int j = 0; j < 5; j++)
  41. {
  42. //计算点与中心点之间的距离
  43. float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
  44. float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
  45. float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;
  46. a11 += ax * ax; a12 += ax * ay; a13 += ax * az;
  47. a22 += ay * ay; a23 += ay * az;
  48. a33 += az * az;
  49. }
  50. a11 /= 5; a12 /= 5; a13 /= 5; a22 /= 5; a23 /= 5; a33 /= 5;
  51. // 构建协方差矩阵,这种构造方式非常无脑,直接填内容
  52. matA1.at<float>(0, 0) = a11; matA1.at<float>(0, 1) = a12; matA1.at<float>(0, 2) = a13;
  53. matA1.at<float>(1, 0) = a12; matA1.at<float>(1, 1) = a22; matA1.at<float>(1, 2) = a23;
  54. matA1.at<float>(2, 0) = a13; matA1.at<float>(2, 1) = a23; matA1.at<float>(2, 2) = a33;
  55. // eigen-decomposition of the covariance matrix: eigenvalues go to matD1, eigenvectors to matV1
  56. cv::eigen(matA1, matD1, matV1);
  57. // if the largest eigenvalue is much larger than the second largest, the 5 points form a line and the corner is valid
  58. // concretely: the largest eigenvalue must exceed 3 times the second largest for the if-branch below to execute
  59. if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1))
  60. {
  61. // 当前帧角点坐标(map系下)
  62. float x0 = pointSel.x;
  63. float y0 = pointSel.y;
  64. float z0 = pointSel.z;
  65. // 局部map对应中心角点,沿着特征向量的方向,前后各取一个点
  66. float x1 = cx + 0.1 * matV1.at<float>(0, 0);
  67. float y1 = cy + 0.1 * matV1.at<float>(0, 1);
  68. float z1 = cz + 0.1 * matV1.at<float>(0, 2);
  69. float x2 = cx - 0.1 * matV1.at<float>(0, 0);
  70. float y2 = cy - 0.1 * matV1.at<float>(0, 1);
  71. float z2 = cz - 0.1 * matV1.at<float>(0, 2);
  72. // cross product: the two triangle edges (p0->p1 and p0->p2) are crossed,
  73. // and the magnitude of the result equals twice the triangle's area
  74. float a012 = sqrt(((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) * ((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
  75. + ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) * ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))
  76. + ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)) * ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)));
  77. // 计算12点构成线段的长度
  78. float l12 = sqrt((x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2) + (z1 - z2)*(z1 - z2));
  79. // la, lb, lc are the components of the unit vector along the perpendicular from the point to the line (the LOAM papers derive them)
  80. // they are used to build the coefficients below
  81. float la = ((y1 - y2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
  82. + (z1 - z2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))) / a012 / l12;
  83. float lb = -((x1 - x2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
  84. - (z1 - z2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;
  85. float lc = -((x1 - x2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))
  86. + (y1 - y2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;
  87. //三角形的高,点到直线的距离
  88. float ld2 = a012 / l12;
  89. //涉及到一个鲁棒核函数,距离越大,s越小,使用距离惩罚因子(权重)
  90. float s = 1 - 0.9 * fabs(ld2);
  91. coeff.x = s * la;
  92. coeff.y = s * lb;
  93. coeff.z = s * lc;
  94. coeff.intensity = s * ld2; // intensity is reused once more: in each part of the code it stores whatever value is convenient, here the weighted distance
  95. // the smaller the distance, the larger the weight s; only sufficiently well-matched points (s > 0.1) are kept
  96. if (s > 0.1)
  97. {
  98. // s大于0.1 当前激光帧角点,加入匹配集合中
  99. laserCloudOriCornerVec[i] = pointOri;
  100. // 角点的参数
  101. coeffSelCornerVec[i] = coeff;
  102. laserCloudOriCornerFlag[i] = true;
  103. }
  104. }
  105. }
  106. }
  107. }
  108. // 当前激光帧平面点寻找局部map匹配点
  109. void surfOptimization()
  110. {
  111. updatePointAssociateToMap(); // refresh the pose-to-map transform first
  112. #pragma omp parallel for num_threads(numberOfCores) // parallel loop over numberOfCores threads
  113. for (int i = 0; i < laserCloudSurfLastDSNum; i++)
  114. {
  115. PointType pointOri, pointSel, coeff;
  116. std::vector<int> pointSearchInd;
  117. std::vector<float> pointSearchSqDis;
  118. pointOri = laserCloudSurfLastDS->points[i];
  119. // 这里的sel的话,就是ori经过函数以后的一个输出
  120. pointAssociateToMap(&pointOri, &pointSel);
  121. kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis); // 一个快速搜索
  122. // 这个地方应该是Ax = B
  123. Eigen::Matrix<float, 5, 3> matA0;
  124. Eigen::Matrix<float, 5, 1> matB0;
  125. Eigen::Vector3f matX0;
  126. // 一个初始化 不过多解释
  127. matA0.setZero();
  128. matB0.fill(-1);
  129. matX0.setZero();
  130. // pointSearchSqDis[4] is the largest of the 5 sorted squared distances, so this checks that all five neighbors are within 1 m
  131. if (pointSearchSqDis[4] < 1.0)
  132. {
  133. for (int j = 0; j < 5; j++)
  134. {
  135. // 将前5个点存入matA
  136. matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
  137. matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
  138. matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
  139. }
  140. // Ax=B,根据这五个点求解平面方程,进行QR分解,获得平面方程解
  141. matX0 = matA0.colPivHouseholderQr().solve(matB0);
  142. // 平面方程的系数,也是法向量的分量
  143. float pa = matX0(0, 0);
  144. float pb = matX0(1, 0);
  145. float pc = matX0(2, 0);
  146. float pd = 1; // 这个就是平面方程的常数项
  147. // 将matx归一化,得到单位法向量
  148. float ps = sqrt(pa * pa + pb * pb + pc * pc);
  149. pa /= ps; pb /= ps; pc /= ps; pd /= ps;
  150. bool planeValid = true;
  151. // check the plane quality: if any of the 5 points lies more than 0.2 m from the fitted plane, the points are too scattered to form a plane
  152. // the expression in the if-condition is just the point-to-plane distance |pa*x + pb*y + pc*z + pd| (valid because (pa, pb, pc) was normalized above)
  153. for (int j = 0; j < 5; j++)
  154. {
  155. if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
  156. pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
  157. pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2)
  158. {
  159. planeValid = false;
  160. break;
  161. }
  162. }
  163. // 平面合格
  164. if (planeValid)
  165. {
  166. // 当前激光帧点到平面距离
  167. float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;
  168. // 可以理解成权值 也可以说是鲁邦核函数,也可以说是惩罚因子
  169. float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x
  170. + pointSel.y * pointSel.y + pointSel.z * pointSel.z));
  171. // 系数赋值
  172. coeff.x = s * pa;
  173. coeff.y = s * pb;
  174. coeff.z = s * pc;
  175. coeff.intensity = s * pd2; // 这里的intensity赋值为pd2 * s 具体公式参见上面内容
  176. // 这个if条件里面的内容 大体是说 找到了平面匹配点,然后给下面三个变量赋值(加入到集合中)
  177. if (s > 0.1)
  178. {
  179. laserCloudOriSurfVec[i] = pointOri;
  180. coeffSelSurfVec[i] = coeff;
  181. laserCloudOriSurfFlag[i] = true;
  182. }
  183. }
  184. }
  185. }
  186. }
  187. // 提取当前帧中与局部map匹配上了的角点、平面点,加入同一集合
  188. void combineOptimizationCoeffs()
  189. {
  190. // combine corner coeffs
  191. // 遍历当前帧角点集合,提取出与局部map匹配上了的角点
  192. for (int i = 0; i < laserCloudCornerLastDSNum; ++i)
  193. {
  194. if (laserCloudOriCornerFlag[i] == true)
  195. {
  196. laserCloudOri->push_back(laserCloudOriCornerVec[i]);
  197. coeffSel->push_back(coeffSelCornerVec[i]);
  198. }
  199. }
  200. // combine surf coeffs
  201. // 遍历当前帧平面点集合,提取出与局部map匹配上了的平面点
  202. for (int i = 0; i < laserCloudSurfLastDSNum; ++i)
  203. {
  204. if (laserCloudOriSurfFlag[i] == true)
  205. {
  206. laserCloudOri->push_back(laserCloudOriSurfVec[i]);
  207. coeffSel->push_back(coeffSelSurfVec[i]);
  208. }
  209. }
  210. // 上面两个for循环以后,laserCloudOri里面加了角点和平面点 然后coeffsel里面是角点和平面点的系数,这个上面有明确公式
  211. // reset flag for next iteration
  212. // 清空标记 这个判断条件用在了上面两个for循环的if条件中
  213. std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
  214. std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
  215. }
  216. // LM优化
  217. bool LMOptimization(int iterCount)
  218. {
  219. // This optimization is from the original loam_velodyne, need to cope with coordinate transformation
  220. // i.e. the derivation below is written in the camera axis convention, so the lidar axes have to be remapped
  221. // lidar <- camera --- camera <- lidar
  222. // x = z --- x = y
  223. // y = x --- y = z
  224. // z = y --- z = x
  225. // roll = yaw --- roll = pitch
  226. // pitch = roll --- pitch = yaw
  227. // yaw = pitch --- yaw = roll
  228. // left table: camera-to-lidar assignment; right table: lidar-to-camera assignment
  229. // read "a = b" as "b is assigned to a"; e.g. if the camera's x goes into the lidar's y, then the lidar's y goes into the camera's x, so the two directions stay consistent
  230. // that is how this particular code does it; other codebases should be checked individually
  231. // lidar -> camera
  232. // 雷达到相机变换的三轴方向的正弦 和 余弦
  233. float srx = sin(transformTobeMapped[1]);
  234. float crx = cos(transformTobeMapped[1]);
  235. float sry = sin(transformTobeMapped[2]);
  236. float cry = cos(transformTobeMapped[2]);
  237. float srz = sin(transformTobeMapped[0]);
  238. float crz = cos(transformTobeMapped[0]);
  239. int laserCloudSelNum = laserCloudOri->size();
  240. // 当前帧匹配特征点数太少
  241. if (laserCloudSelNum < 50)
  242. {
  243. return false;
  244. }
  245. // first argument is the number of rows, second the number of columns; the At suffix means transpose
  246. // CV_32F means single-precision floating point entries
  247. // cv::Scalar::all(0) initializes every entry to 0
  248. cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
  249. cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
  250. cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));
  251. cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
  252. cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));
  253. cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
  254. PointType pointOri, coeff;
  255. // 遍历匹配特征点,构建Jacobian矩阵
  256. for (int i = 0; i < laserCloudSelNum; i++)
  257. {
  258. // lidar -> camera
  259. // 参照一开始的转换关系进行转换
  260. pointOri.x = laserCloudOri->points[i].y;
  261. pointOri.y = laserCloudOri->points[i].z;
  262. pointOri.z = laserCloudOri->points[i].x;
  263. // lidar -> camera
  264. // coeff为系数,在上面有具体的公式
  265. coeff.x = coeffSel->points[i].y;
  266. coeff.y = coeffSel->points[i].z;
  267. coeff.z = coeffSel->points[i].x;
  268. coeff.intensity = coeffSel->points[i].intensity;
  269. // in camera
  270. // arx/ary/arz are the partial derivatives of the weighted point-to-feature distance with respect to the three rotation angles (chain rule through the rotation matrix, in the camera-axis convention from LOAM)
  271. float arx = (crx*sry*srz*pointOri.x + crx*crz*sry*pointOri.y - srx*sry*pointOri.z) * coeff.x
  272. + (-srx*srz*pointOri.x - crz*srx*pointOri.y - crx*pointOri.z) * coeff.y
  273. + (crx*cry*srz*pointOri.x + crx*cry*crz*pointOri.y - cry*srx*pointOri.z) * coeff.z;
  274. float ary = ((cry*srx*srz - crz*sry)*pointOri.x
  275. + (sry*srz + cry*crz*srx)*pointOri.y + crx*cry*pointOri.z) * coeff.x
  276. + ((-cry*crz - srx*sry*srz)*pointOri.x
  277. + (cry*srz - crz*srx*sry)*pointOri.y - crx*sry*pointOri.z) * coeff.z;
  278. float arz = ((crz*srx*sry - cry*srz)*pointOri.x + (-cry*crz-srx*sry*srz)*pointOri.y)*coeff.x
  279. + (crx*crz*pointOri.x - crx*srz*pointOri.y) * coeff.y
  280. + ((sry*srz + cry*crz*srx)*pointOri.x + (crz*sry-cry*srx*srz)*pointOri.y)*coeff.z;
  281. // lidar -> camera
  282. matA.at<float>(i, 0) = arz;
  283. matA.at<float>(i, 1) = arx;
  284. matA.at<float>(i, 2) = ary;
  285. matA.at<float>(i, 3) = coeff.z;
  286. matA.at<float>(i, 4) = coeff.x;
  287. matA.at<float>(i, 5) = coeff.y;
  288. // matB holds the negative weighted point-to-line / point-to-plane residuals (coeff.intensity) as the observation vector of the Gauss-Newton system
  289. matB.at<float>(i, 0) = -coeff.intensity;
  290. }
  291. cv::transpose(matA, matAt);
  292. matAtA = matAt * matA;
  293. matAtB = matAt * matB;
  294. // Solve the Gauss-Newton normal equations (J^T J) dx = -J^T f for the 6-DoF update via QR decomposition
  295. cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);
  296. // On the first iteration, check the approximate Hessian J^T J for degeneracy: if some eigenvalue is too small, the corresponding direction of the solution is poorly constrained
  297. if (iterCount == 0)
  298. {
  299. cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
  300. cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
  301. cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));
  302. // cv::eigen returns the eigenvalues of matAtA in matE (in descending order) and the corresponding eigenvectors as the rows of matV
  303. cv::eigen(matAtA, matE, matV);
  304. matV.copyTo(matV2); // matV2 starts as a copy of matV; degenerate rows are zeroed out below
  305. isDegenerate = false; // "degenerate" means some eigenvalue falls below the threshold, i.e. the observations poorly constrain that direction of the solution
  306. float eignThre[6] = {100, 100, 100, 100, 100, 100}; // eigenvalue threshold: for each eigenvalue below 100, zero the corresponding row of matV2 and mark the problem as degenerate
  307. for (int i = 5; i >= 0; i--)
  308. {
  309. if (matE.at<float>(0, i) < eignThre[i])
  310. {
  311. for (int j = 0; j < 6; j++)
  312. {
  313. matV2.at<float>(i, j) = 0;
  314. }
  315. isDegenerate = true;
  316. }
  317. else
  318. {
  319. break;
  320. }
  321. }
  322. matP = matV.inv() * matV2; // projection matrix that zeroes out the update components along the degenerate directions (solution remapping, as in the original LOAM); applied to matX below when isDegenerate is true
  323. }
  324. if (isDegenerate)
  325. {
  326. cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
  327. matX.copyTo(matX2);
  328. matX = matP * matX2;
  329. }
  330. transformTobeMapped[0] += matX.at<float>(0, 0);
  331. transformTobeMapped[1] += matX.at<float>(1, 0);
  332. transformTobeMapped[2] += matX.at<float>(2, 0);
  333. transformTobeMapped[3] += matX.at<float>(3, 0);
  334. transformTobeMapped[4] += matX.at<float>(4, 0);
  335. transformTobeMapped[5] += matX.at<float>(5, 0);
  336. // rad2deg 把弧度转换为角度
  337. float deltaR = sqrt(pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) + pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) + pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
  338. float deltaT = sqrt(pow(matX.at<float>(3, 0) * 100, 2) + pow(matX.at<float>(4, 0) * 100, 2) + pow(matX.at<float>(5, 0) * 100, 2));
  339. if (deltaR < 0.05 && deltaT < 0.05)
  340. {
  341. return true; // converged: the pose update is small enough (< 0.05 deg rotation and < 0.05 cm translation)
  342. }
  343. return false; // keep optimizing
  344. }
  345. // scan-to-map优化
  346. void scan2MapOptimization()
  347. {
  348. // 如果关键帧点云数据为空,则直接返回 就是一个鲁棒性判断
  349. if (cloudKeyPoses3D->points.empty())
  350. return;
  351. // 关键点数量 这里是降采样之后的 大于阈值,边为10,面为100
  352. if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum)
  353. {
  354. // 输入为局部map点云
  355. kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS); // the downsampled local map becomes the kd-tree's search cloud for nearest-neighbour lookups
  356. kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);
  357. // 设置的一个迭代次数 30
  358. for (int iterCount = 0; iterCount < 30; iterCount++)
  359. {
  360. // 每次迭代清空特征点集合
  361. // 当前帧与局部map匹配上了的角点,平面点,加入同一集合,后面是对应点的参数,这个参数重点关注
  362. laserCloudOri->clear();
  363. coeffSel->clear();
  364. // 当前激光帧角点寻找局部map匹配点 这个要看函数内部定义
  365. // cornerOptimization() fills coeff for each matched corner point; its la/lb/lc are the components of the unit gradient of the point-to-line distance, analogous to pa/pb/pc for planes
  366. cornerOptimization();
  367. // 当前激光帧平面点寻找局部map匹配点
  368. surfOptimization();
  369. // 经过上面两个以后 会得到角点和平面点的coeff 用在下面的LM优化中
  370. // 提取当前帧中与局部map匹配上了的角点、平面点,加入同一集合
  371. combineOptimizationCoeffs();
  372. // scan-to-map优化
  373. // 对匹配特征点计算雅克比矩阵,观测值为特征点到直线、平面的距离,构建高斯牛顿方程,迭代优化当前位姿,存transformTobeMapped
  374. if (LMOptimization(iterCount) == true)
  375. break;
  376. }
  377. transformUpdate();
  378. }
  379. else
  380. {
  381. ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);
  382. }
  383. }
  384. // 用imu原始RPY数据与scan-to-map优化后的位姿进行加权融合,更新当前帧位姿的roll pitch 约束z坐标
  385. void transformUpdate()
  386. {
  387. if (cloudInfo.imuAvailable == true)
  388. {
  389. // only fuse the IMU attitude when |imuPitchInit| < 1.4 rad (about 80 deg), i.e. the IMU roll/pitch is usable
  390. if (std::abs(cloudInfo.imuPitchInit) < 1.4)
  391. {
  392. double imuWeight = 0.01;
  393. tf::Quaternion imuQuaternion;
  394. tf::Quaternion transformQuaternion;
  395. double rollMid, pitchMid, yawMid;
  396. // slerp roll
  397. // roll角求加权均值,用scan-to-map优化得到的位姿与imu原始RPY数据,进行加权平均
  398. transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
  399. imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
  400. // slerp: spherical linear interpolation between the two rotations
  401. // it returns the quaternion lying between this quaternion and the IMU quaternion, weighted by imuWeight (0.01), so the IMU only gently corrects roll/pitch
  402. tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
  403. transformTobeMapped[0] = rollMid;
  404. // slerp pitch
  405. transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
  406. imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
  407. tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
  408. transformTobeMapped[1] = pitchMid;
  409. }
  410. }
  411. transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
  412. transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
  413. transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);
  414. }

This wraps up the four functions of the scan-to-map optimization. cornerOptimization() and surfOptimization() match the current frame's corner and surface points against the local map and fill coeff for each match: coeff.x/y/z is the weighted gradient of the point-to-feature distance (for a plane, the weighted unit normal) and coeff.intensity is the weighted residual, which later becomes the observation in the Gauss-Newton system. combineOptimizationCoeffs() gathers the matched points into one set, and LMOptimization() iteratively solves for the 6-DoF pose, with a degeneracy check guarding poorly constrained directions. Also worth remembering are the lidar-to-camera and camera-to-lidar axis mappings at the top of LMOptimization(), inherited from the original LOAM. A small sketch of the coefficient computation follows.
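
To pin down what coeff actually carries, here is a minimal, self-contained sketch of the surface-point coefficient computation described above. The free-function form and the name computeSurfCoeff are illustrative only (the real code works on member buffers); the formula mirrors the snippet above and assumes the plane normal has already been normalized.

// A minimal sketch (not the original member function) of the surf coefficient
// computation: the weighted plane normal goes into coeff.xyz and the weighted
// signed point-to-plane distance into coeff.intensity.
#include <cmath>
#include <pcl/point_types.h>

using PointType = pcl::PointXYZI;

// pointSel: current laser point already transformed into the map frame
// (pa, pb, pc, pd): fitted plane with unit normal (pa, pb, pc)
bool computeSurfCoeff(const PointType& pointSel,
                      float pa, float pb, float pc, float pd,
                      PointType& coeff)
{
    // Signed point-to-plane distance (valid because the normal is unit length)
    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;

    // Robust weight: large residuals relative to sqrt(range) shrink the weight
    float range2 = pointSel.x * pointSel.x + pointSel.y * pointSel.y + pointSel.z * pointSel.z;
    float s = 1.0f - 0.9f * std::fabs(pd2) / std::sqrt(std::sqrt(range2));

    coeff.x = s * pa;              // weighted gradient of the distance w.r.t. the point position
    coeff.y = s * pb;
    coeff.z = s * pc;
    coeff.intensity = s * pd2;     // weighted residual, used as the -matB entry later

    return s > 0.1f;               // keep only confident matches
}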

  1. float constraintTransformation(float value, float limit)
  2. {
  3. if (value < -limit)
  4. value = -limit;
  5. if (value > limit)
  6. value = limit;
  7. return value;
  8. }
  9. bool saveFrame()
  10. {
  11. if (cloudKeyPoses3D->points.empty())
  12. return true;
  13. Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());
  14. Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],
  15. transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
  16. Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
  17. float x, y, z, roll, pitch, yaw;
  18. pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);
  19. if (abs(roll) < surroundingkeyframeAddingAngleThreshold &&
  20. abs(pitch) < surroundingkeyframeAddingAngleThreshold &&
  21. abs(yaw) < surroundingkeyframeAddingAngleThreshold &&
  22. sqrt(x*x + y*y + z*z) < surroundingkeyframeAddingDistThreshold)
  23. return false;
  24. return true;
  25. }

These two helpers are straightforward: constraintTransformation() clamps a value to ±limit (used to bound roll, pitch and z), and saveFrame() computes the relative transform between the last keyframe and the current optimized pose, accepting a new keyframe only when the rotation or translation exceeds the configured thresholds. The gating logic is sketched below.
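
As a companion to saveFrame(), here is a rough sketch of that keyframe gating. The free-function form is illustrative; the thresholds stand in for surroundingkeyframeAddingAngleThreshold and surroundingkeyframeAddingDistThreshold from the config.

// Sketch of the keyframe gating: accept a new keyframe only when the pose has
// rotated or translated enough relative to the last saved keyframe.
#include <cmath>
#include <Eigen/Geometry>
#include <pcl/common/eigen.h>   // pcl::getTranslationAndEulerAngles

bool isNewKeyframe(const Eigen::Affine3f& lastKeyPose,
                   const Eigen::Affine3f& currentPose,
                   float angleThreshold,   // e.g. surroundingkeyframeAddingAngleThreshold
                   float distThreshold)    // e.g. surroundingkeyframeAddingDistThreshold
{
    // Relative transform: last keyframe -> current optimized pose
    Eigen::Affine3f transBetween = lastKeyPose.inverse() * currentPose;

    float x, y, z, roll, pitch, yaw;
    pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);

    // Too little motion on every axis -> not a keyframe
    if (std::fabs(roll)  < angleThreshold &&
        std::fabs(pitch) < angleThreshold &&
        std::fabs(yaw)   < angleThreshold &&
        std::sqrt(x * x + y * y + z * z) < distThreshold)
        return false;

    return true;
}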

  1. // 激光里程计因子
  2. void addOdomFactor()
  3. {
  4. // 检查是否有关键帧点
  5. if (cloudKeyPoses3D->points.empty())
  6. {
  7. // 第一帧初始化先验因子
  8. noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, M_PI*M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter
  9. gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));
  10. // 变量节点设置初始值
  11. initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));
  12. }
  13. else
  14. {
  15. // 添加激光里程计因子
  16. noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
  17. gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());
  18. gtsam::Pose3 poseTo = trans2gtsamPose(transformTobeMapped);
  19. // 添加关键帧最近的两个位姿
  20. // 参数:前一帧id,当前帧id,第一帧与当前帧的位姿变换(作为观测值),噪声协方差
  21. gtSAMgraph.add(BetweenFactor<Pose3>(cloudKeyPoses3D->size()-1, cloudKeyPoses3D->size(), poseFrom.between(poseTo), odometryNoise));
  22. // 变量节点设置初始值
  23. // 下标
  24. initialEstimate.insert(cloudKeyPoses3D->size(), poseTo);
  25. // if (isDegenerate)
  26. // {
  27. // // 如果是退化的
  28. // // adding VINS constraints is deleted as benefits are not obvious, disable for now
  29. // // 添加VINS约束被删除,因为好处不明显,暂时禁用
  30. // gtSAMgraph.add(BetweenFactor<Pose3>(cloudKeyPoses3D->size()-1, cloudKeyPoses3D->size(), vinsPoseFrom.between(vinsPoseTo), odometryNoise));
  31. // }
  32. }
  33. }
  34. // 添加GPS因子
  35. void addGPSFactor()
  36. {
  37. // 一开始都要有鲁棒条件判断
  38. if (gpsQueue.empty())
  39. return; // 直接返回
  40. // wait for system initialized and settles down 等待系统初始化并稳定下来
  41. if (cloudKeyPoses3D->points.empty())
  42. return;
  43. else if (pointDistance(cloudKeyPoses3D->front(), cloudKeyPoses3D->back()) < 5.0)
  44. return;
  45. // pose covariance small, no need to correct 位姿协方差小,无需校正
  46. // In GTSAM's Pose3 marginal covariance the first three rows/cols are rotation and the last three are translation, so (3,3) and (4,4) are the variances of x and y position
  47. if (poseCovariance(3,3) < poseCovThreshold && poseCovariance(4,4) < poseCovThreshold)
  48. return;
  49. // last gps position
  50. static PointType lastGPSPoint;
  51. while (!gpsQueue.empty())
  52. {
  53. if (gpsQueue.front().header.stamp.toSec() < timeLaserInfoCur - 0.2)
  54. {
  55. // message too old 意思是时间间隔超过0.2,说明这个数据有点旧了,就pop出去,双端队列
  56. gpsQueue.pop_front();
  57. }
  58. else if (gpsQueue.front().header.stamp.toSec() > timeLaserInfoCur + 0.2)
  59. {
  60. // message too new
  61. break; // 直接结束while循环
  62. }
  63. else
  64. {
  65. nav_msgs::Odometry thisGPS = gpsQueue.front(); // 得到队首元素
  66. gpsQueue.pop_front();
  67. // GPS too noisy, skip
  68. float noise_x = thisGPS.pose.covariance[0];
  69. float noise_y = thisGPS.pose.covariance[7];
  70. float noise_z = thisGPS.pose.covariance[14];
  71. if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold) // 如果大于阈值,结束下面部分
  72. continue;
  73. float gps_x = thisGPS.pose.pose.position.x;
  74. float gps_y = thisGPS.pose.pose.position.y;
  75. float gps_z = thisGPS.pose.pose.position.z;
  76. if (!useGpsElevation)
  77. {
  78. gps_z = transformTobeMapped[5];
  79. noise_z = 0.01;
  80. }
  81. // GPS not properly initialized (0,0,0) GPS未正确初始化
  82. if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
  83. continue; // 跳过下面的内容
  84. // Add GPS every a few meters 每隔几米添加 GPS
  85. PointType curGPSPoint;
  86. curGPSPoint.x = gps_x;
  87. curGPSPoint.y = gps_y;
  88. curGPSPoint.z = gps_z;
  89. if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
  90. continue; // 这个5,在前面出现过很多次,是作者团队自己定义的
  91. else
  92. lastGPSPoint = curGPSPoint;
  93. gtsam::Vector Vector3(3);
  94. Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
  95. noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3); // 这个算方差的
  96. gtsam::GPSFactor gps_factor(cloudKeyPoses3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);
  97. gtSAMgraph.add(gps_factor);
  98. aLoopIsClosed = true;
  99. break;
  100. }
  101. }
  102. }
  103. // 闭环因子
  104. void addLoopFactor()
  105. {
  106. // 常规鲁棒判断
  107. if (loopIndexQueue.empty())
  108. return;
  109. for (size_t i = 0; i < loopIndexQueue.size(); ++i)
  110. {
  111. // 添加相邻两个回环位姿
  112. int indexFrom = loopIndexQueue[i].first;
  113. int indexTo = loopIndexQueue[i].second;
  114. gtsam::Pose3 poseBetween = loopPoseQueue[i];
  115. gtsam::noiseModel::Diagonal::shared_ptr noiseBetween = loopNoiseQueue[i];
  116. // 添加到因子图中 添加的内容:两个index 一个pose 一个noise
  117. gtSAMgraph.add(BetweenFactor<Pose3>(indexFrom, indexTo, poseBetween, noiseBetween));
  118. }
  119. // 清空所有的回环数据队列
  120. loopIndexQueue.clear();
  121. loopPoseQueue.clear();
  122. loopNoiseQueue.clear();
  123. aLoopIsClosed = true;
  124. }

All three functions add factors through gtSAMgraph.add(); the last argument is the noise model, and a smaller covariance simply means that constraint is trusted more in the optimization. The GPS factor call is commented out in saveKeyFramesAndFactor() and can be re-enabled later to see its effect. A loop factor is just a BetweenFactor built from two keyframe indices, a relative pose and a noise model. Setting aLoopIsClosed = true flags that a global constraint (loop closure or GPS) was added, which later tells correctPoses() to re-propagate all keyframe poses. A minimal GTSAM sketch of this pattern follows.
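
To make the gtSAMgraph.add() pattern concrete, here is a minimal GTSAM sketch using the same noise values as the code above. The indices and poses are placeholders; only the factor types and the noise ordering (rotation variances first, translation last) are taken from the source.

// Minimal GTSAM sketch: prior factor on the first key pose, a between (odometry)
// factor linking two consecutive key poses, and the same pattern for loop factors.
#include <cmath>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

void buildFactorGraphSketch()
{
    NonlinearFactorGraph graph;
    Values initial;

    // Prior on pose 0: tight on roll/pitch, loose on yaw and translation
    auto priorNoise = noiseModel::Diagonal::Variances(
        (Vector(6) << 1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8).finished());
    graph.add(PriorFactor<Pose3>(0, Pose3(), priorNoise));
    initial.insert(0, Pose3());

    // Odometry factor: the observation is the relative pose between two key poses
    auto odomNoise = noiseModel::Diagonal::Variances(
        (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
    Pose3 poseFrom;                                   // previous key pose (placeholder)
    Pose3 poseTo(Rot3(), Point3(1.0, 0.0, 0.0));      // current key pose (placeholder)
    graph.add(BetweenFactor<Pose3>(0, 1, poseFrom.between(poseTo), odomNoise));
    initial.insert(1, poseTo);

    // A loop closure is just another BetweenFactor connecting two non-consecutive
    // indices with the ICP-derived relative pose and its noise model.
}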

  1. // 因子图优化,更新所有关键帧位姿
  2. void saveKeyFramesAndFactor()
  3. {
  4. // 计算当前帧与前一帧位姿变换,如果变化太小,不设为关键帧,反之设为关键帧
  5. if (saveFrame() == false)
  6. return; // 直接返回
  7. // odom factor
  8. // 激光里程计因子
  9. addOdomFactor();
  10. // 这个gps因子没有加入,这个地方以后研究
  11. // gps factor
  12. //addGPSFactor();
  13. // loop factor
  14. // 闭环检测因子,这个需要VIS先看见,然后用LIS来进一步优化,论文中是这么说的
  15. addLoopFactor();
  16. // update iSAM
  17. // iSAM就是来优化的
  18. isam->update(gtSAMgraph, initialEstimate);
  19. isam->update();
  20. // update以后要清空一下保存的因子图,note!历史数据不会清掉,ISAM保存起来了
  21. gtSAMgraph.resize(0);
  22. initialEstimate.clear();
  23. // save key poses
  24. PointType thisPose3D;
  25. PointTypePose thisPose6D;
  26. Pose3 latestEstimate;
  27. // 优化结果
  28. isamCurrentEstimate = isam->calculateEstimate();
  29. latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);
  30. cout << "****************************************************" << endl;
  31. isamCurrentEstimate.print("Current estimate: ");
  32. // cloudKeyPoses3D 加入当前帧位姿
  33. thisPose3D.x = latestEstimate.translation().x();
  34. thisPose3D.y = latestEstimate.translation().y();
  35. thisPose3D.z = latestEstimate.translation().z();
  36. thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index: intensity is reused to store the keyframe index, not a reflectance value
  37. cloudKeyPoses3D->push_back(thisPose3D);
  38. // cloudKeyPoses6D 加入当前帧位姿
  39. // 6D中存有时间戳
  40. thisPose6D.x = thisPose3D.x;
  41. thisPose6D.y = thisPose3D.y;
  42. thisPose6D.z = thisPose3D.z;
  43. thisPose6D.intensity = thisPose3D.intensity ; // this can be used as index
  44. thisPose6D.roll = latestEstimate.rotation().roll();
  45. thisPose6D.pitch = latestEstimate.rotation().pitch();
  46. thisPose6D.yaw = latestEstimate.rotation().yaw();
  47. thisPose6D.time = timeLaserInfoCur;
  48. cloudKeyPoses6D->push_back(thisPose6D); // 加入到cloudKeyPoses6D中
  49. cout << "****************************************************" << endl;
  50. cout << "Pose covariance:" << endl;
  51. cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;
  52. //位姿协方差
  53. poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size()-1); // 姿态协方差
  54. // save updated transform
  55. // transformTobeMapped中更新当前帧位姿,注意每个位置对应的RPY 和 xyz
  56. // 这个顺序比较特殊,需要留心
  57. transformTobeMapped[0] = latestEstimate.rotation().roll();
  58. transformTobeMapped[1] = latestEstimate.rotation().pitch();
  59. transformTobeMapped[2] = latestEstimate.rotation().yaw();
  60. transformTobeMapped[3] = latestEstimate.translation().x();
  61. transformTobeMapped[4] = latestEstimate.translation().y();
  62. transformTobeMapped[5] = latestEstimate.translation().z();
  63. // save all the received edge and surf points
  64. // 当前帧激光角点、平面点,降采样集合
  65. pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
  66. pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
  67. pcl::copyPointCloud(*laserCloudCornerLastDS, *thisCornerKeyFrame); // 下面这两行,就一个复制
  68. pcl::copyPointCloud(*laserCloudSurfLastDS, *thisSurfKeyFrame);
  69. // save key frame cloud
  70. // 保存特征点降采样集合
  71. cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
  72. surfCloudKeyFrames.push_back(thisSurfKeyFrame);
  73. // save path for visualization
  74. // 更新里程计轨迹,将thisPose6D的位姿加入到/lidar/mapping/path中去
  75. updatePath(thisPose6D);
  76. }

saveKeyFramesAndFactor() does more than add factors: it gates on saveFrame(), pushes the new factors and initial guesses into iSAM2, clears the local graph and value containers (iSAM2 keeps the history internally), reads back the latest optimized pose and its marginal covariance, writes the result into transformTobeMapped and the keyframe pose clouds, saves the downsampled feature clouds, and updates the path. The update cycle is sketched below.
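
A stripped-down sketch of that iSAM2 update cycle follows. The function name and the way the containers are passed in are illustrative; the calls themselves match what saveKeyFramesAndFactor() does.

// Sketch of the incremental update: feed the new factors, clear the local
// containers (iSAM2 keeps all history internally), then read back the latest
// pose estimate and its marginal covariance.
#include <gtsam/geometry/Pose3.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

Pose3 isamUpdateSketch(ISAM2& isam, NonlinearFactorGraph& newFactors, Values& newValues,
                       Eigen::MatrixXd& latestCovariance)
{
    isam.update(newFactors, newValues);   // add the new factors / initial guesses
    isam.update();                        // one more relinearization pass

    newFactors.resize(0);                 // only *new* factors go in next time
    newValues.clear();

    Values estimate = isam.calculateEstimate();
    size_t latest = estimate.size() - 1;  // keys here are plain indices 0, 1, 2, ...

    latestCovariance = isam.marginalCovariance(latest);  // 6x6: rotation block first, translation last
    return estimate.at<Pose3>(latest);    // becomes transformTobeMapped / cloudKeyPoses entries
}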

  1. // 更新因子图中所有变量节点的位姿,也就是所有历史关键帧的位姿,更新里程计轨迹
  2. void correctPoses()
  3. {
  4. // 日常鲁棒性判断
  5. if (cloudKeyPoses3D->points.empty())
  6. return;
  7. if (aLoopIsClosed == true)
  8. {
  9. // clear path
  10. // 清空里程计轨迹
  11. globalPath.poses.clear();
  12. // update key poses
  13. int numPoses = isamCurrentEstimate.size();
  14. // 更新因子图中所有变量节点的位姿,也就是所有历史关键帧的位姿
  15. for (int i = 0; i < numPoses; ++i)
  16. {
  17. // 这里3d表示xyz,6d表示xyz+rpy,代码一目了然
  18. cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at<Pose3>(i).translation().x();
  19. // cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at<Pose3>(i).translation().x();
  20. cloudKeyPoses3D->points[i].y = isamCurrentEstimate.at<Pose3>(i).translation().y();
  21. cloudKeyPoses3D->points[i].z = isamCurrentEstimate.at<Pose3>(i).translation().z();
  22. cloudKeyPoses6D->points[i].x = cloudKeyPoses3D->points[i].x;
  23. cloudKeyPoses6D->points[i].y = cloudKeyPoses3D->points[i].y;
  24. cloudKeyPoses6D->points[i].z = cloudKeyPoses3D->points[i].z;
  25. cloudKeyPoses6D->points[i].roll = isamCurrentEstimate.at<Pose3>(i).rotation().roll();
  26. cloudKeyPoses6D->points[i].pitch = isamCurrentEstimate.at<Pose3>(i).rotation().pitch();
  27. cloudKeyPoses6D->points[i].yaw = isamCurrentEstimate.at<Pose3>(i).rotation().yaw();
  28. // 将修正好的位姿逐一再添加到globalPath中去
  29. updatePath(cloudKeyPoses6D->points[i]);
  30. }
  31. aLoopIsClosed = false; // reset the flag: poses are only re-propagated again when a new loop-closure/GPS factor arrives
  32. // ID for reseting IMU pre-integration
  33. ++imuPreintegrationResetId; // incremented after a correction; publishOdometry() sends this id in covariance[0] so the IMU preintegration node knows to reset its optimizer
  34. }
  35. }
  36. // 发布激光里程计
  37. void publishOdometry()
  38. {
  39. // Publish odometry for ROS
  40. nav_msgs::Odometry laserOdometryROS;
  41. laserOdometryROS.header.stamp = timeLaserInfoStamp;
  42. laserOdometryROS.header.frame_id = "odom";
  43. laserOdometryROS.child_frame_id = "odom_mapping";
  44. // transformTobeMapped[0..2] hold roll/pitch/yaw and [3..5] hold x/y/z
  45. laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];
  46. laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];
  47. laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];
  48. laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]); // 用四元数
  49. laserOdometryROS.pose.covariance[0] = double(imuPreintegrationResetId);
  50. pubOdomAftMappedROS.publish(laserOdometryROS);
  51. // Publish TF
  52. // 发布TF,odom->lidar
  53. static tf::TransformBroadcaster br;
  54. tf::Transform t_odom_to_lidar = tf::Transform(tf::createQuaternionFromRPY(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]),
  55. tf::Vector3(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5]));
  56. tf::StampedTransform trans_odom_to_lidar = tf::StampedTransform(t_odom_to_lidar, timeLaserInfoStamp, "odom", "lidar_link");
  57. br.sendTransform(trans_odom_to_lidar);
  58. }
  59. // 更新里程计轨迹,将thisPose6D的位姿加入到/lidar/mapping/path 中去
  60. // 更新路径(位置坐标 + 旋转四元数)
  61. void updatePath(const PointTypePose& pose_in)
  62. {
  63. geometry_msgs::PoseStamped pose_stamped;
  64. pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);
  65. pose_stamped.header.frame_id = "odom";
  66. pose_stamped.pose.position.x = pose_in.x;
  67. pose_stamped.pose.position.y = pose_in.y;
  68. pose_stamped.pose.position.z = pose_in.z;
  69. tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
  70. pose_stamped.pose.orientation.x = q.x();
  71. pose_stamped.pose.orientation.y = q.y();
  72. pose_stamped.pose.orientation.z = q.z();
  73. pose_stamped.pose.orientation.w = q.w();
  74. globalPath.poses.push_back(pose_stamped);
  75. }
  76. //发布里程计、点云、轨迹
  77. void publishFrames()
  78. {
  79. if (cloudKeyPoses3D->points.empty())
  80. return;
  81. // publish key poses
  82. // 发布历史关键帧位姿集合
  83. publishCloud(&pubKeyPoses, cloudKeyPoses6D, timeLaserInfoStamp, "odom");
  84. // Publish surrounding key frames
  85. // 发布局部map的降采样特征点集合
  86. publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, "odom");
  87. // publish registered key frame
  88. // 发布历史帧(累加的)的角点,平面点降采样集合
  89. if (pubRecentKeyFrame.getNumSubscribers() != 0)
  90. {
  91. pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
  92. PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
  93. // transformPointCloud作用是返回输入点云乘以输入的transform
  94. *cloudOut += *transformPointCloud(laserCloudCornerLastDS, &thisPose6D);
  95. *cloudOut += *transformPointCloud(laserCloudSurfLastDS, &thisPose6D);
  96. publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, "odom");
  97. }
  98. // publish registered high-res raw cloud
  99. if (pubCloudRegisteredRaw.getNumSubscribers() != 0)
  100. {
  101. pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
  102. pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
  103. PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
  104. *cloudOut = *transformPointCloud(cloudOut, &thisPose6D);
  105. publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, "odom");
  106. }
  107. // publish path
  108. // 发布路径
  109. if (pubPath.getNumSubscribers() != 0)
  110. {
  111. globalPath.header.stamp = timeLaserInfoStamp;
  112. globalPath.header.frame_id = "odom";
  113. pubPath.publish(globalPath);
  114. }
  115. }
  116. };

In short: correctPoses() rewrites every historical keyframe pose from the latest iSAM estimate once a loop-closure or GPS correction has been applied; publishOdometry() publishes the optimized pose as nav_msgs::Odometry plus the odom->lidar_link TF, with imuPreintegrationResetId tucked into covariance[0]; updatePath() appends the pose to the global path; and publishFrames() publishes the key poses, the local map and the registered clouds. The odometry/TF publication pattern is sketched below.
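
For reference, a minimal sketch of the odometry + TF publication pattern in publishOdometry(). The frame names follow the code above, while the free-function form and the publisher argument are placeholders.

// Sketch: publish the optimized pose (roll, pitch, yaw, x, y, z order as in
// transformTobeMapped) as nav_msgs::Odometry and as an odom -> lidar_link TF.
#include <ros/ros.h>
#include <nav_msgs/Odometry.h>
#include <tf/transform_broadcaster.h>
#include <tf/transform_datatypes.h>

void publishOdomSketch(ros::Publisher& pubOdom, const float t[6], const ros::Time& stamp)
{
    nav_msgs::Odometry odom;
    odom.header.stamp = stamp;
    odom.header.frame_id = "odom";
    odom.child_frame_id = "odom_mapping";
    odom.pose.pose.position.x = t[3];
    odom.pose.pose.position.y = t[4];
    odom.pose.pose.position.z = t[5];
    odom.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(t[0], t[1], t[2]);
    pubOdom.publish(odom);

    // The matching TF, so rviz and downstream nodes see the same pose
    static tf::TransformBroadcaster br;
    tf::Transform tOdomToLidar(tf::createQuaternionFromRPY(t[0], t[1], t[2]),
                               tf::Vector3(t[3], t[4], t[5]));
    br.sendTransform(tf::StampedTransform(tOdomToLidar, stamp, "odom", "lidar_link"));
}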

  1. int main(int argc, char** argv)
  2. {
  3. ros::init(argc, argv, "lidar");
  4. mapOptimization MO;
  5. ROS_INFO("\033[1;32m----> Lidar Map Optimization Started.\033[0m");
  6. // 这两个线程,重点看
  7. std::thread loopDetectionthread(&mapOptimization::loopClosureThread, &MO);
  8. std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);
  9. ros::spin();
  10. // 线程加入到主线程
  11. loopDetectionthread.join();
  12. visualizeMapThread.join();
  13. return 0;
  14. }

The main function initializes the node and constructs mapOptimization, whose constructor registers the subscribers and publishers. It then launches two worker threads: loopClosureThread() performs loop-closure detection and ICP refinement, while visualizeGlobalMapThread() periodically publishes the global map for rviz. ros::spin() drives the callbacks, and the two threads are joined before returning 0. That concludes the mapOptimization walkthrough.

Next up is the visual (VINS) part of the code. A few open questions remain from this pass, mainly around a couple of variables whose assignments were unclear; they will be resolved on the next read-through.
