
FAST-LIO2 Source Code Walkthrough

        The code structure is clear. At the top level there are two folders: one is fast-lio itself, and the other adds Scan Context based loop-closure detection and pose-graph optimization.

fast-lio

This folder is essentially the implementation of the FAST-LIO2 paper, including the forward processing pipeline and the ikd-Tree implementation.

 

1. Start with the CMakeLists.txt to get an overview of the code structure:

    # Define ROOT_DIR used in the code
    add_definitions(-DROOT_DIR=\"${CMAKE_CURRENT_SOURCE_DIR}/\")

    # Detect the number of CPU cores to decide whether to enable multi-core
    # processing, leaving one core of headroom
    if(CMAKE_SYSTEM_PROCESSOR MATCHES "(x86)|(X86)|(amd64)|(AMD64)" )
      include(ProcessorCount)
      ProcessorCount(N)
      message("Processer number:  ${N}")
      if(N GREATER 4)
        add_definitions(-DMP_EN)
        add_definitions(-DMP_PROC_NUM=3)
        message("core for MP: 3")
      elseif(N GREATER 3)
        add_definitions(-DMP_EN)
        add_definitions(-DMP_PROC_NUM=2)
        message("core for MP: 2")
      else()
        add_definitions(-DMP_PROC_NUM=1)
      endif()
    else()
      add_definitions(-DMP_PROC_NUM=1)
    endif()

    # Dependencies: OpenMP, PythonLibs, MATPLOTLIB_CPP_INCLUDE_DIRS (plotting)

    # Custom message Pose6D.msg
    add_message_files(
      FILES
      Pose6D.msg
    )

    # Main sources:
    src/laserMapping.cpp
    include/ikd-Tree/ikd_Tree.cpp
    src/preprocess.cpp
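
The MP_EN / MP_PROC_NUM definitions above are consumed by OpenMP pragmas in the C++ code, notably in the per-point measurement-model loop of laserMapping.cpp. A minimal sketch of that pattern, assuming the usual omp_set_num_threads / parallel-for idiom rather than quoting the exact source:

    #include <omp.h>

    // Sketch only: how the CMake-defined MP_EN / MP_PROC_NUM macros are typically used.
    void process_points(int feats_down_size)
    {
    #ifdef MP_EN
        omp_set_num_threads(MP_PROC_NUM);  // thread count chosen at configure time (3, 2 or 1 depending on core count)
        #pragma omp parallel for
    #endif
        for (int i = 0; i < feats_down_size; i++)
        {
            // per-point work (nearest-neighbor search, residual construction, ...) goes here
        }
    }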

 Pose6D.msg:

The pre-integrated states at the Lidar origin for the IMU measurements within a frame:

float64    offset_time # the offset time of the IMU measurement w.r.t. the first lidar point
float64[3] acc       # the preintegrated total acceleration (global frame) at the Lidar origin
float64[3] gyr       # the unbiased angular velocity (body frame) at the Lidar origin
float64[3] vel       # the preintegrated velocity (global frame) at the Lidar origin
float64[3] pos       # the preintegrated position (global frame) at the Lidar origin
float64[9] rot       # the preintegrated rotation (global frame) at the Lidar origin
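
The rot field stores a 3x3 rotation matrix flattened into 9 doubles. For reference, here is a hypothetical packing helper showing how such a message could be filled from Eigen types (FAST-LIO has its own set_pose6d helper in common_lib.h; the header name fast_lio/Pose6D.h and the row-major layout shown here are assumptions):

    #include <Eigen/Dense>
    #include <fast_lio/Pose6D.h>   // assumed generated message header

    fast_lio::Pose6D pack_pose6d(double dt,
                                 const Eigen::Vector3d &a, const Eigen::Vector3d &g,
                                 const Eigen::Vector3d &v, const Eigen::Vector3d &p,
                                 const Eigen::Matrix3d &R)
    {
        fast_lio::Pose6D out;
        out.offset_time = dt;                      // offset w.r.t. the first lidar point
        for (int i = 0; i < 3; i++)
        {
            out.acc[i] = a(i);
            out.gyr[i] = g(i);
            out.vel[i] = v(i);
            out.pos[i] = p(i);
            for (int j = 0; j < 3; j++)
                out.rot[i * 3 + j] = R(i, j);      // 3x3 rotation stored row-major (assumed) in rot[9]
        }
        return out;
    }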

The main entry point is in src/laserMapping.cpp; the other two cpp files are used by it as libraries.

main() program flow:

ROS node initialization -> parameter loading -> parameter and pointer initialization -> reading of the Lidar-IMU extrinsic matrices -> IMU integration settings such as measurement covariances -> Kalman filter configuration (convergence threshold, number of iterations, the iterated Kalman filter model, etc.) -> logging initialization.
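
The Kalman-filter configuration mentioned above essentially comes down to one call on the esekfom filter object. A rough sketch (the names get_f, df_dx, df_dw, h_share_model and NUM_MAX_ITERATIONS are the ones used in laserMapping.cpp / use-ikfom.hpp; the exact call may differ between versions):

    double epsi[23] = {0.001};
    std::fill(epsi, epsi + 23, 0.001);            // per-dimension convergence threshold of the iterated EKF
    kf.init_dyn_share(get_f, df_dx, df_dw,        // process model f and its Jacobians
                      h_share_model,              // shared measurement model (point-to-plane residuals)
                      NUM_MAX_ITERATIONS, epsi);  // maximum number of filter iterations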

1. After the lidar type has been read, the node subscribes to the point-cloud and IMU topics with the callbacks standard_pcl_cbk() and imu_cbk().

time_buffer is a queue keyed by the lidar timestamps; processing is driven by lidar time.

    void standard_pcl_cbk(const sensor_msgs::PointCloud2::ConstPtr &msg) // standard (Velodyne/Ouster) callback
    {
        mtx_buffer.lock();
        scan_count ++;
        double preprocess_start_time = omp_get_wtime(); // roughly "now"
        if (msg->header.stamp.toSec() < last_timestamp_lidar) // check for a lidar timestamp jump
        {
            ROS_ERROR("lidar loop back, clear buffer");
            lidar_buffer.clear();
        }

        PointCloudXYZI::Ptr ptr(new PointCloudXYZI());
        p_pre->process(msg, ptr);                         // lidar preprocessing, produces the feature cloud
        lidar_buffer.push_back(ptr);                      // preprocessed lidar data
        time_buffer.push_back(msg->header.stamp.toSec()); // time_buffer is keyed by the lidar timestamps
        last_timestamp_lidar = msg->header.stamp.toSec();
        s_plot11[scan_count] = omp_get_wtime() - preprocess_start_time; // used to plot the preprocessing time
        mtx_buffer.unlock();
        sig_buffer.notify_all();                          // wake up the waiting thread
    }

    void imu_cbk(const sensor_msgs::Imu::ConstPtr &msg_in)
    {
        publish_count ++;
        // cout<<"IMU got at: "<<msg_in->header.stamp.toSec()<<endl;
        sensor_msgs::Imu::Ptr msg(new sensor_msgs::Imu(*msg_in));

        if (abs(timediff_lidar_wrt_imu) > 0.1 && time_sync_en) // timediff_lidar_wrt_imu is only used with Livox lidars
        {
            msg->header.stamp = \
            ros::Time().fromSec(timediff_lidar_wrt_imu + msg_in->header.stamp.toSec());
        }

        double timestamp = msg->header.stamp.toSec(); // compensated IMU timestamp (only Livox lidars need the offset)

        mtx_buffer.lock();

        if (timestamp < last_timestamp_imu) // check that the IMU timestamps are monotonic, detect jumps
        {
            ROS_WARN("imu loop back, clear buffer");
            imu_buffer.clear();
        }

        last_timestamp_imu = timestamp; // latest IMU time

        imu_buffer.push_back(msg);      // push into the queue
        mtx_buffer.unlock();
        sig_buffer.notify_all();        // wake up the waiting thread
    }

        The point-cloud callback invokes the preprocessing class and produces the feature cloud as its output.

        The node then enters the ROS loop. A signal handler is installed as well: terminating the process from a terminal normally sends a signal, and when it is received all waiting threads are woken up so the node can exit cleanly.
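
The handler itself is tiny; roughly (paraphrased from the repo, minor details may differ):

    void SigHandle(int sig)
    {
        flg_exit = true;                 // makes the main while-loop break on the next pass
        ROS_WARN("catch sig %d", sig);
        sig_buffer.notify_all();         // wake any thread blocked on the buffer condition variable
    }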

2. Next, the definition of the measurement group:

It contains the current lidar scan and the queue of IMU messages covering it.

    struct MeasureGroup // Lidar data and IMU data for the current process step
    {
        MeasureGroup()
        {
            lidar_beg_time = 0.0;
            this->lidar.reset(new PointCloudXYZI());
        };
        double lidar_beg_time;
        PointCloudXYZI::Ptr lidar;
        deque<sensor_msgs::Imu::ConstPtr> imu;
    };

3. Then the data synchronization: bool sync_packages(MeasureGroup &meas)

    // Processes the buffered data: takes the IMU messages that fall within the time span of the
    // current lidar scan out of the queues, time-aligns them, and stores everything in meas
    bool sync_packages(MeasureGroup &meas)
    {
        if (lidar_buffer.empty() || imu_buffer.empty()) {
            return false;
        }

        /*** push a lidar scan ***/
        if(!lidar_pushed) // defaults to false; indicates whether a lidar scan has already been pushed into meas
        {
            meas.lidar = lidar_buffer.front(); // push the oldest scan in the queue into the measurement
            if(meas.lidar->points.size() <= 1) // make sure the pushed scan actually contains points
            {
                lidar_buffer.pop_front();
                return false;
            }
            meas.lidar_beg_time = time_buffer.front(); // the scan time comes from the front of time_buffer, which stores the lidar timestamps
            // the header stamp marks the scan start (driver dependent); the scan end time is derived from the per-point offset times
            lidar_end_time = meas.lidar_beg_time + meas.lidar->points.back().curvature / double(1000);
            lidar_pushed = true; // a lidar measurement has been extracted
        }

        if (last_timestamp_imu < lidar_end_time) // if even the newest IMU message is earlier than the scan end time, skip this frame for now
        {
            return false;
        }

        /*** push imu data, and pop from imu buffer ***/
        double imu_time = imu_buffer.front()->header.stamp.toSec(); // initialize imu_time from the oldest IMU message
        meas.imu.clear();
        while ((!imu_buffer.empty()) && (imu_time < lidar_end_time))
        {
            imu_time = imu_buffer.front()->header.stamp.toSec(); // start from the oldest IMU message
            if(imu_time > lidar_end_time) break; // otherwise the message is added to the measurement
            meas.imu.push_back(imu_buffer.front());
            imu_buffer.pop_front(); // pop the IMU message that was just pushed into the measurement
        }
        // at this point the measurement contains the current lidar scan plus all IMU data up to the scan end time

        lidar_buffer.pop_front(); // pop the processed data
        time_buffer.pop_front();
        lidar_pushed = false;     // reset, so the next scan refreshes the scan start/end times again
        return true;
    }

        The synchronization is lidar-driven: the lidar scan is stored into the measurement group together with the queue of IMU messages that fall between the scan start and the scan end.
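
Because the preprocessing step stores each point's offset time in milliseconds in the curvature field, the scan-end computation is just a unit conversion. A small worked example with made-up numbers:

    double lidar_beg_time    = 1000.000;  // header stamp of the scan [s]
    double last_point_offset = 99.8;      // meas.lidar->points.back().curvature [ms]
    double lidar_end_time    = lidar_beg_time + last_point_offset / 1000.0;  // = 1000.0998 s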

4. The lidar preprocessing used above; a short detour into the preprocessing module:

        Preprocessing is done through the instance shared_ptr<Preprocess> p_pre(new Preprocess()); it is only used inside the lidar callback, and its parameters are set from the ROS parameters read before the callbacks are registered, roughly as sketched below.
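
The parameter wiring in main() looks roughly like this (parameter names and defaults vary between FAST-LIO versions, so treat this as a sketch rather than the exact source):

    nh.param<int>   ("preprocess/lidar_type",  p_pre->lidar_type, AVIA);        // AVIA / VELO16 / OUST64
    nh.param<double>("preprocess/blind",       p_pre->blind, 0.01);             // blind radius around the sensor
    nh.param<int>   ("preprocess/scan_line",   p_pre->N_SCANS, 16);             // number of rings
    nh.param<int>   ("point_filter_num",       p_pre->point_filter_num, 2);     // keep every n-th point
    nh.param<bool>  ("feature_extract_enable", p_pre->feature_enabled, false);  // feature extraction off by default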

preprocess.h/cpp

#define IS_VALID(a)  ((abs(a)>1e8) ? true : false)  // macro used to check a value by its magnitude (true when |a| > 1e8)

// enums describing the lidar type and the per-point feature categories

enum LID_TYPE{AVIA = 1, VELO16, OUST64}; //{1, 2, 3}

enum Feature{Nor, Poss_Plane, Real_Plane, Edge_Jump, Edge_Plane, Wire, ZeroPoint};

enum Surround{Prev, Next};

enum E_jump{Nr_nor, Nr_zero, Nr_180, Nr_inf, Nr_blind};

    void Preprocess::set(bool feat_en, int lid_type, double bld, int pfilt_num)
    {
        feature_enabled = feat_en;
        lidar_type = lid_type;
        blind = bld;
        point_filter_num = pfilt_num; // set the lidar blind radius and type
    }

    // for mechanical (spinning) lidars
    void Preprocess::process(const sensor_msgs::PointCloud2::ConstPtr &msg, PointCloudXYZI::Ptr &pcl_out)
    {
        switch (lidar_type)
        {
        case OUST64:
            oust64_handler(msg);
            break;

        case VELO16:
            velodyne_handler(msg);
            break;

        default:
            printf("Error LiDAR Type");
            break;
        }
        *pcl_out = pl_surf; // output the segmented surface points
    }

    // converts the point cloud to a ROS message type, but does not publish it
    void Preprocess::pub_func(PointCloudXYZI &pl, const ros::Time &ct)
    {
        pl.height = 1; pl.width = pl.size();
        sensor_msgs::PointCloud2 output;
        pcl::toROSMsg(pl, output);
        output.header.frame_id = "livox";
        output.header.stamp = ct;
    }

    void Preprocess::velodyne_handler(const sensor_msgs::PointCloud2::ConstPtr &msg)
    {
        pl_surf.clear();
        pl_corn.clear();
        pl_full.clear(); // clear the surface, corner and full clouds

        pcl::PointCloud<velodyne_ros::Point> pl_orig;
        pcl::fromROSMsg(*msg, pl_orig);
        int plsize = pl_orig.points.size();
        pl_surf.reserve(plsize); // size of the raw cloud

        bool is_first[MAX_LINE_NUM];
        double yaw_fp[MAX_LINE_NUM] = {0};     // yaw of first scan point
        double omega_l = 3.61;                 // scan angular velocity: 10 Hz covers 360 deg in 0.1 s, i.e. 3.61 deg/ms
        float yaw_last[MAX_LINE_NUM] = {0.0};  // yaw of last scan point
        float time_last[MAX_LINE_NUM] = {0.0}; // last offset time

        if (pl_orig.points[plsize - 1].time > 0) // the driver provides per-point timestamps
        {
            given_offset_time = true; // offset times are given
        }
        else
        {
            given_offset_time = false;
            memset(is_first, true, sizeof(is_first)); // initialize the array
            double yaw_first = atan2(pl_orig.points[0].y, pl_orig.points[0].x) * 57.29578; // 180/PI = 57.29578
            double yaw_end = yaw_first;               // yaw of the first point of the frame
            int layer_first = pl_orig.points[0].ring; // ring of the first point of the frame
            for (uint i = plsize - 1; i > 0; i--)
            {
                if (pl_orig.points[i].ring == layer_first)
                {
                    yaw_end = atan2(pl_orig.points[i].y, pl_orig.points[i].x) * 57.29578; // yaw of a point on the same ring
                    break;
                }
            } // so yaw_end is the yaw of the last point lying on the same ring as the first point
        }

        if(feature_enabled) // feature extraction enabled
        {
            for (int i = 0; i < N_SCANS; i++)
            {
                pl_buff[i].clear();
                pl_buff[i].reserve(plsize);
            }

            for (int i = 0; i < plsize; i++)
            {
                PointType added_pt;
                added_pt.normal_x = 0; // normal
                added_pt.normal_y = 0;
                added_pt.normal_z = 0;
                int layer = pl_orig.points[i].ring;
                if (layer >= N_SCANS) continue; // drop rings beyond the configured N_SCANS; if the real lidar differs, N_SCANS wins
                added_pt.x = pl_orig.points[i].x;
                added_pt.y = pl_orig.points[i].y;
                added_pt.z = pl_orig.points[i].z;
                added_pt.intensity = pl_orig.points[i].intensity;
                added_pt.curvature = pl_orig.points[i].time / 1000.0; // units: ms; the per-point time is stored in the pcl curvature field, similar to LeGO-LOAM

                if (!given_offset_time) // offset times have to be reconstructed
                {
                    double yaw_angle = atan2(added_pt.y, added_pt.x) * 57.2957;
                    if (is_first[layer]) // is_first starts as true and is cleared after the first point of each ring
                    {
                        // printf("layer: %d; is first: %d", layer, is_first[layer]);
                        yaw_fp[layer] = yaw_angle; // record the yaw of the first point of this ring
                        is_first[layer] = false;
                        added_pt.curvature = 0.0;  // the point with curvature 0 is the first point of its ring
                        yaw_last[layer] = yaw_angle;
                        time_last[layer] = added_pt.curvature; // time_last of this point is set to 0
                        continue;
                    }

                    if (yaw_angle <= yaw_fp[layer]) // interpolate the offset time from the yaw difference, assuming constant angular velocity
                    {
                        added_pt.curvature = (yaw_fp[layer]-yaw_angle) / omega_l;
                    }
                    else // the point has wrapped past the first point's yaw, add a full revolution
                    {
                        added_pt.curvature = (yaw_fp[layer]-yaw_angle+360.0) / omega_l;
                    }

                    // time_last[layer] was initialized to 0
                    if (added_pt.curvature < time_last[layer]) added_pt.curvature+=360.0/omega_l;

                    yaw_last[layer] = yaw_angle; // remember this point
                    time_last[layer] = added_pt.curvature;
                }

                pl_buff[layer].points.push_back(added_pt); // split the frame into per-ring clouds stored in pl_buff
            }

            for (int j = 0; j < N_SCANS; j++)
            {
                PointCloudXYZI &pl = pl_buff[j]; // the cloud of the j-th ring, not a single point
                int linesize = pl.size();        // size of that ring's cloud
                if (linesize < 2) continue;
                vector<orgtype> &types = typess[j];
                types.clear();
                types.resize(linesize); // reallocate
                linesize--;
                for (uint i = 0; i < linesize; i++)
                {
                    types[i].range = sqrt(pl[i].x * pl[i].x + pl[i].y * pl[i].y); // planar (xy) range, used for the blind zone
                    vx = pl[i].x - pl[i + 1].x;
                    vy = pl[i].y - pl[i + 1].y;
                    vz = pl[i].z - pl[i + 1].z;
                    types[i].dista = vx * vx + vy * vy + vz * vz; // squared distance to the next point
                }
                types[linesize].range = sqrt(pl[linesize].x * pl[linesize].x + pl[linesize].y * pl[linesize].y);
                give_feature(pl, types); // label the points of each ring
            }
        }
        else // feature extraction disabled (the default)
        {
            for (int i = 0; i < plsize; i++)
            {
                PointType added_pt;
                // cout<<"!!!!!!"<<i<<" "<<plsize<<endl;
                added_pt.normal_x = 0;
                added_pt.normal_y = 0;
                added_pt.normal_z = 0;
                added_pt.x = pl_orig.points[i].x;
                added_pt.y = pl_orig.points[i].y;
                added_pt.z = pl_orig.points[i].z;
                added_pt.intensity = pl_orig.points[i].intensity;
                added_pt.curvature = pl_orig.points[i].time / 1000.0; // requires per-point timestamps from the driver; stored in curvature

                if (!given_offset_time) // no offset times given
                {
                    int layer = pl_orig.points[i].ring;
                    double yaw_angle = atan2(added_pt.y, added_pt.x) * 57.2957;

                    if (is_first[layer]) // first point (and first yaw) of this ring
                    {
                        // printf("layer: %d; is first: %d", layer, is_first[layer]);
                        yaw_fp[layer] = yaw_angle;
                        is_first[layer] = false;
                        added_pt.curvature = 0.0;
                        yaw_last[layer] = yaw_angle;
                        time_last[layer] = added_pt.curvature;
                        continue;
                    }

                    // compute offset time from the yaw difference
                    if (yaw_angle <= yaw_fp[layer])
                    {
                        added_pt.curvature = (yaw_fp[layer]-yaw_angle) / omega_l;
                    }
                    else
                    {
                        added_pt.curvature = (yaw_fp[layer]-yaw_angle+360.0) / omega_l;
                    }

                    if (added_pt.curvature < time_last[layer]) added_pt.curvature+=360.0/omega_l;

                    // added_pt.curvature = pl_orig.points[i].t;

                    yaw_last[layer] = yaw_angle;
                    time_last[layer] = added_pt.curvature;
                }

                // if(i==(plsize-1)) printf("index: %d layer: %d, yaw: %lf, offset-time: %lf, condition: %d\n", i, layer, yaw_angle, added_pt.curvature, prints);
                if (i % point_filter_num == 0) // keep only every point_filter_num-th point
                {
                    if(added_pt.x*added_pt.x+added_pt.y*added_pt.y+added_pt.z*added_pt.z > blind) // outside the blind zone
                    {
                        pl_surf.points.push_back(added_pt);
                        // printf("time mode: %d time: %d \n", given_offset_time, pl_orig.points[i].t);
                    }
                }
            }
        }

        // pub_func(pl_surf, pub_full, msg->header.stamp);
        // pub_func(pl_surf, pub_surf, msg->header.stamp);
        // pub_func(pl_surf, pub_corn, msg->header.stamp);
    }

By default, feature extraction is disabled: the handler takes the raw lidar cloud as input and returns the pl_surf cloud to the main program.
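
A quick sanity check on the constant-angular-velocity interpolation used above (illustrative numbers):

    double omega_l   = 3.61;                            // deg/ms: a 10 Hz scan covers 360 deg in ~100 ms
    double yaw_fp    = 170.0;                           // yaw of the first point on this ring [deg]
    double yaw_angle = 80.0;                            // yaw of the current point [deg]
    double offset_ms = (yaw_fp - yaw_angle) / omega_l;  // = 90 / 3.61 ≈ 24.9 ms, stored in curvature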

5. The main ROS loop, i.e. the overall SLAM pipeline

    // main flow of the ROS loop
    signal(SIGINT, SigHandle);
    ros::Rate rate(5000);
    bool status = ros::ok();
    while (status)
    {
        if (flg_exit) break;
        ros::spinOnce();
        if(sync_packages(Measures))
        {
            if (flg_reset)
            {
                ROS_WARN("reset when rosbag play back");
                p_imu->Reset();
                flg_reset = false;
                Measures.imu.clear();
                continue;
            }

            double t0,t1,t2,t3,t4,t5,match_start, solve_start, svd_time;

            match_time = 0;
            kdtree_search_time = 0.0;
            solve_time = 0;
            solve_const_H_time = 0;
            svd_time   = 0;
            t0 = omp_get_wtime();

            p_imu->Process(Measures, kf, feats_undistort);
            state_point = kf.get_x();
            pos_lid = state_point.pos + state_point.rot * state_point.offset_T_L_I;

            if (feats_undistort->empty() || (feats_undistort == NULL))
            {
                first_lidar_time = Measures.lidar_beg_time;
                p_imu->first_lidar_time = first_lidar_time;
                // cout<<"FAST-LIO not ready"<<endl;
                continue;
            }

            flg_EKF_inited = (Measures.lidar_beg_time - first_lidar_time) < INIT_TIME ? \
                            false : true;
            /*** Segment the map in lidar FOV ***/
            lasermap_fov_segment();

            /*** downsample the feature points in a scan ***/
            downSizeFilterSurf.setInputCloud(feats_undistort);
            downSizeFilterSurf.filter(*feats_down_body);
            t1 = omp_get_wtime();
            feats_down_size = feats_down_body->points.size();

            /*** initialize the map kdtree ***/
            if(ikdtree.Root_Node == nullptr)
            {
                if(feats_down_size > 5)
                {
                    ikdtree.set_downsample_param(filter_size_map_min);
                    feats_down_world->resize(feats_down_size);
                    for(int i = 0; i < feats_down_size; i++)
                    {
                        pointBodyToWorld(&(feats_down_body->points[i]), &(feats_down_world->points[i]));
                    }
                    ikdtree.Build(feats_down_world->points);
                }
                continue;
            }
            int featsFromMapNum = ikdtree.validnum();
            kdtree_size_st = ikdtree.size();

            // cout<<"[ mapping ]: In num: "<<feats_undistort->points.size()<<" downsamp "<<feats_down_size<<" Map num: "<<featsFromMapNum<<"effect num:"<<effct_feat_num<<endl;

            /*** ICP and iterated Kalman filter update ***/
            normvec->resize(feats_down_size);
            feats_down_world->resize(feats_down_size);

            V3D ext_euler = SO3ToEuler(state_point.offset_R_L_I);
            fout_pre<<setw(20)<<Measures.lidar_beg_time - first_lidar_time<<" "<<euler_cur.transpose()<<" "<< state_point.pos.transpose()<<" "<<ext_euler.transpose() << " "<<state_point.offset_T_L_I.transpose()<< " " << state_point.vel.transpose() \
            <<" "<<state_point.bg.transpose()<<" "<<state_point.ba.transpose()<<" "<<state_point.grav<< endl;

            if(0) // If you need to see map point, change to "if(1)"
            {
                PointVector ().swap(ikdtree.PCL_Storage);
                ikdtree.flatten(ikdtree.Root_Node, ikdtree.PCL_Storage, NOT_RECORD);
                featsFromMap->clear();
                featsFromMap->points = ikdtree.PCL_Storage;
            }

            pointSearchInd_surf.resize(feats_down_size);
            Nearest_Points.resize(feats_down_size);
            int  rematch_num = 0;
            bool nearest_search_en = true; //

            t2 = omp_get_wtime();

            /*** iterated state estimation ***/
            double t_update_start = omp_get_wtime();
            double solve_H_time = 0;
            kf.update_iterated_dyn_share_modified(LASER_POINT_COV, solve_H_time);
            state_point = kf.get_x();
            euler_cur = SO3ToEuler(state_point.rot);
            pos_lid = state_point.pos + state_point.rot * state_point.offset_T_L_I;
            geoQuat.x = state_point.rot.coeffs()[0];
            geoQuat.y = state_point.rot.coeffs()[1];
            geoQuat.z = state_point.rot.coeffs()[2];
            geoQuat.w = state_point.rot.coeffs()[3];

            double t_update_end = omp_get_wtime();

            /******* Publish odometry *******/
            publish_odometry(pubOdomAftMapped);

            /*** add the feature points to map kdtree ***/
            t3 = omp_get_wtime();
            map_incremental();
            t5 = omp_get_wtime();

            /******* Publish points *******/
            publish_path(pubPath);
            if (scan_pub_en || pcd_save_en)      publish_frame_world(pubLaserCloudFull);
            if (scan_pub_en && scan_body_pub_en) {
                publish_frame_body(pubLaserCloudFull_body);
                publish_frame_lidar(pubLaserCloudFull_lidar);
            }
            // publish_effect_world(pubLaserCloudEffect);
            // publish_map(pubLaserCloudMap);

            /*** Debug variables ***/
            if (runtime_pos_log)
            {
                frame_num ++;
                kdtree_size_end = ikdtree.size();
                aver_time_consu = aver_time_consu * (frame_num - 1) / frame_num + (t5 - t0) / frame_num;
                aver_time_icp = aver_time_icp * (frame_num - 1)/frame_num + (t_update_end - t_update_start) / frame_num;
                aver_time_match = aver_time_match * (frame_num - 1)/frame_num + (match_time)/frame_num;
                aver_time_incre = aver_time_incre * (frame_num - 1)/frame_num + (kdtree_incremental_time)/frame_num;
                aver_time_solve = aver_time_solve * (frame_num - 1)/frame_num + (solve_time + solve_H_time)/frame_num;
                aver_time_const_H_time = aver_time_const_H_time * (frame_num - 1)/frame_num + solve_time / frame_num;
                T1[time_log_counter] = Measures.lidar_beg_time;
                s_plot[time_log_counter] = t5 - t0;
                s_plot2[time_log_counter] = feats_undistort->points.size();
                s_plot3[time_log_counter] = kdtree_incremental_time;
                s_plot4[time_log_counter] = kdtree_search_time;
                s_plot5[time_log_counter] = kdtree_delete_counter;
                s_plot6[time_log_counter] = kdtree_delete_time;
                s_plot7[time_log_counter] = kdtree_size_st;
                s_plot8[time_log_counter] = kdtree_size_end;
                s_plot9[time_log_counter] = aver_time_consu;
                s_plot10[time_log_counter] = add_point_size;
                time_log_counter ++;
                printf("[ mapping ]: time: IMU + Map + Input Downsample: %0.6f ave match: %0.6f ave solve: %0.6f ave ICP: %0.6f map incre: %0.6f ave total: %0.6f icp: %0.6f construct H: %0.6f \n",t1-t0,aver_time_match,aver_time_solve,t3-t1,t5-t3,aver_time_consu,aver_time_icp, aver_time_const_H_time);
                ext_euler = SO3ToEuler(state_point.offset_R_L_I);
                fout_out << setw(20) << Measures.lidar_beg_time - first_lidar_time << " " << euler_cur.transpose() << " " << state_point.pos.transpose()<< " " << ext_euler.transpose() << " "<<state_point.offset_T_L_I.transpose()<<" "<< state_point.vel.transpose() \
                <<" "<<state_point.bg.transpose()<<" "<<state_point.ba.transpose()<<" "<<state_point.grav<<" "<<feats_undistort->points.size()<<endl;
                dump_lio_state_to_log(fp);
            }
        }

        status = ros::ok();
        rate.sleep();
    }

6. IMU-based state propagation and the iterated Kalman filter

First, a look at the IMU processing.

To be continued.
