
ROS: Mapping 2D Coordinates to 3D Coordinates (Matching Color and Depth Images, Based on the D415 Depth Camera)

  • Recently, while using a D435 for object recognition and grasping, I found there was no ready-to-use ROS node for converting a 2D pixel coordinate into a 3D coordinate. The coordinate-mapping code I had written earlier for the Kinect V2 was too device-specific, so this time I wrote a node that uses the aligned depth/color image topic and the camera-info (intrinsics) topic published by the depth camera's ROS driver to map 2D coordinates into the 3D camera frame.
  • The node posted here maps the 2D coordinate under the mouse pointer into 3D and publishes it as a topic that can be visualized in rviz. The pixel-coordinate topic published by my own object-recognition node uses a custom message file, so I am not including it here; to use this with your own detector, simply replace the mouse-fed coordinate with your own (a sketch of that change follows the full listing below).
  • Principle: the depth camera driver publishes a depth (or color) image that has already been cropped and aligned. Taking the D415 as an example, its ROS node publishes /camera/aligned_depth_to_color/image_raw (depth aligned to color), so you only need to take a coordinate in the color image, read the depth at the same coordinate in the aligned depth image, and then compute the 3D point from the intrinsic matrix. The intrinsics are published on /camera/aligned_depth_to_color/camera_info and can be read directly (the exact formulas are given right after this list).
  • Demo: (the frame rate had to be sacrificed to fit the GIF size limit; the mouse cursor on the left maps to the pink sphere on the right.) [demo GIF]
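
For reference, the mapping done in the imageColorCb callback below is the standard pinhole back-projection. Writing $d(u,v)$ for the 16-bit depth value (in millimeters) at pixel $(u,v)$, and taking $f_x = K[0]$, $f_y = K[4]$, $c_x = K[2]$, $c_y = K[5]$ from the flattened intrinsic matrix in camera_info, the point in the camera optical frame is

$$
Z = \frac{d(u,v)}{1000},\qquad
X = \frac{(u - c_x)\,Z}{f_x},\qquad
Y = \frac{(v - c_y)\,Z}{f_y}
$$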

Code:

coordinate_map.cpp

/**********************
coordinate_map.cpp
author: wxw
email: wangxianwei1994@foxmail.com
time: 2019-7-29
**********************/

#include <iostream>
#include <string>
#include <opencv2/opencv.hpp>
#include <ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <geometry_msgs/PointStamped.h>
#include <image_transport/image_transport.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/Image.h>

using namespace cv;
using namespace std;
class ImageConverter {
private:
	ros::NodeHandle			nh_;
	image_transport::ImageTransport it_;
	image_transport::Subscriber	image_sub_color;	// subscribes to the color image
	image_transport::Subscriber	image_sub_depth;	// subscribes to the depth image

	ros::Subscriber camera_info_sub_;	// subscribes to the camera info of the aligned depth image
	ros::Publisher	arm_point_pub_;		// publishes the mapped 3D point (for visualization, etc.)

	sensor_msgs::CameraInfo		camera_info;
	geometry_msgs::PointStamped	output_point;

	/* Mat depthImage,colorImage; */
	Mat	colorImage;
	Mat	depthImage	= Mat::zeros( 480, 640, CV_16UC1 );	// note: change this to the resolution of the depth image you receive
	Point	mousepos	= Point( 0, 0 );        /* mouse point to be mapped */

public:
	// mouse callback: passes the cursor position out through the param pointer into the member Point mousepos
	static void on_mouse( int event, int x, int y, int flags, void *param )
	{
		switch ( event )
		{
		case EVENT_MOUSEMOVE:                /* move mouse */
		{
			Point &tmppoint = *(cv::Point *) param;
			tmppoint = Point( x, y );
		} break;
		}
	}


	ImageConverter() : it_( nh_ )
	{
    //topic sub:
		image_sub_depth = it_.subscribe( "/camera/aligned_depth_to_color/image_raw",
						 1, &ImageConverter::imageDepthCb, this );
		image_sub_color = it_.subscribe( "/camera/color/image_raw", 1,
						 &ImageConverter::imageColorCb, this );
		camera_info_sub_ =
			nh_.subscribe( "/camera/aligned_depth_to_color/camera_info", 1,
				       &ImageConverter::cameraInfoCb, this );
    
    //topic pub:
		arm_point_pub_ =
			nh_.advertise<geometry_msgs::PointStamped>( "/mouse_point", 10 );

		cv::namedWindow( "colorImage" );
		setMouseCallback( "colorImage", &ImageConverter::on_mouse,
				  (void *) &mousepos );
	}


	~ImageConverter()
	{
		cv::destroyWindow( "colorImage" );
	}


	void cameraInfoCb( const sensor_msgs::CameraInfo &msg )
	{
		camera_info = msg;
	}


	void imageDepthCb( const sensor_msgs::ImageConstPtr &msg )
	{
		cv_bridge::CvImagePtr cv_ptr;

		try {
			cv_ptr =
				cv_bridge::toCvCopy( msg, sensor_msgs::image_encodings::TYPE_16UC1 );
			depthImage = cv_ptr->image;
		} catch ( cv_bridge::Exception &e ) {
			ROS_ERROR( "cv_bridge exception: %s", e.what() );
			return;
		}
	}


	void imageColorCb( const sensor_msgs::ImageConstPtr &msg )
	{
		cv_bridge::CvImagePtr cv_ptr;
		try {
			cv_ptr		= cv_bridge::toCvCopy( msg, sensor_msgs::image_encodings::BGR8 );
			colorImage	= cv_ptr->image;
		} catch ( cv_bridge::Exception &e ) {
			ROS_ERROR( "cv_bridge exception: %s", e.what() );
			return;
		}
		
		// first read the depth at this pixel from the aligned depth image, then use the camera-info intrinsic matrix to compute the corresponding 3D point
		float	real_z	= 0.001 * depthImage.at<u_int16_t>( mousepos.y, mousepos.x );
		float	real_x	=
			(mousepos.x - camera_info.K.at( 2 ) ) / camera_info.K.at( 0 ) * real_z;
		float real_y =
			(mousepos.y - camera_info.K.at( 5 ) ) / camera_info.K.at( 4 ) * real_z;
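		// note: pixels where the sensor has no depth data read back as 0 here, so such
		// points come out at the camera origin (0, 0, 0); you may want to skip them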

		char tam[100];
		sprintf( tam, "(%0.2f,%0.2f,%0.2f)", real_x, real_y, real_z );
		putText( colorImage, tam, mousepos, FONT_HERSHEY_SIMPLEX, 0.6,
			 Scalar( 0, 0, 255 ), 1 );	// draw the (X, Y, Z) text next to the cursor
		circle( colorImage, mousepos, 2, Scalar( 255, 0, 0 ) );
		output_point.header.frame_id	= "camera_color_optical_frame";	// depth is aligned to color, so the point is in the color optical frame
		output_point.header.stamp	= ros::Time::now();
		output_point.point.x		= real_x;
		output_point.point.y		= real_y;
		output_point.point.z		= real_z;
		arm_point_pub_.publish( output_point );
		cv::imshow( "colorImage", colorImage );
		cv::waitKey( 1 );
	}
};

int main( int argc, char **argv )
{
	ros::init( argc, argv, "coordinate_map" );
	ImageConverter imageconverter;
	ros::spin();
	return(0);
}
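
As noted above, if you want to drive this from an object-detection node instead of the mouse, you only need to run your detector's pixel coordinate through the same back-projection. The sketch below pulls that conversion out into a self-contained free function; the function name pixelTo3D and the bounds/zero-depth checks are my own additions for illustration, not part of the node above.

#include <cstdint>
#include <opencv2/opencv.hpp>
#include <sensor_msgs/CameraInfo.h>
#include <geometry_msgs/Point.h>

// Back-project pixel (u, v) using an aligned 16UC1 depth image and the matching
// CameraInfo. Returns false if the pixel is out of bounds or carries no depth data.
bool pixelTo3D( const cv::Mat &depth, const sensor_msgs::CameraInfo &info,
		int u, int v, geometry_msgs::Point &out )
{
	if ( u < 0 || v < 0 || u >= depth.cols || v >= depth.rows )
		return false;
	uint16_t d = depth.at<uint16_t>( v, u );        /* row = v, col = u */
	if ( d == 0 )                                   /* 0 = no depth data */
		return false;
	double z = 0.001 * d;                           /* millimeters -> meters */
	out.x	= (u - info.K.at( 2 ) ) / info.K.at( 0 ) * z;   /* (u - cx) / fx * z */
	out.y	= (v - info.K.at( 5 ) ) / info.K.at( 4 ) * z;   /* (v - cy) / fy * z */
	out.z	= z;
	return true;
}

In your own pixel-coordinate callback you would call this with the latest depthImage and camera_info (subscribed exactly as in the constructor above) and publish the result, just as imageColorCb does.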


CMakeLists.txt

cmake_minimum_required(VERSION 2.8.3)
project(coordinate_map)

## Compile as C++11, supported in ROS Kinetic and newer
add_compile_options(-std=c++11)

find_package(catkin REQUIRED COMPONENTS roscpp rostime std_msgs sensor_msgs message_filters cv_bridge image_transport 
compressed_image_transport tf compressed_depth_image_transport  geometry_msgs )

## System dependencies are found with CMake's conventions
find_package(OpenCV REQUIRED)


catkin_package(

)


include_directories(include
  ${catkin_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
)

add_executable(coordinate_map src/coordinate_map.cpp)
target_link_libraries(coordinate_map
  ${catkin_LIBRARIES}
  ${OpenCV_LIBRARIES}
)



package.xml

<?xml version="1.0"?>
<package>
  <name>coordinate_map</name>
  <version>1.0.0</version>
  <description>coordinate_map package</description>

  <maintainer email="wangxianwei1994@foxmail.com">Wangxianwei</maintainer>

  <license>BSD</license>

  <buildtool_depend>catkin</buildtool_depend>

  <build_depend>roscpp</build_depend>
  <build_depend>rostime</build_depend>
  <build_depend>std_msgs</build_depend>
  <build_depend>sensor_msgs</build_depend>
  <build_depend>message_filters</build_depend>
  <build_depend>image_transport</build_depend>
  <build_depend>compressed_image_transport</build_depend>
  <build_depend>compressed_depth_image_transport</build_depend>
  <build_depend>cv_bridge</build_depend>
  <build_depend>tf</build_depend>
  <build_depend>nav_msgs</build_depend>
  <build_depend>geometry_msgs</build_depend>

  <run_depend>message_runtime</run_depend>
  <run_depend>roscpp</run_depend>
  <run_depend>rostime</run_depend>
  <run_depend>std_msgs</run_depend>
  <run_depend>sensor_msgs</run_depend>
  <run_depend>message_filters</run_depend>
  <run_depend>image_transport</run_depend>
  <run_depend>compressed_image_transport</run_depend>
  <run_depend>compressed_depth_image_transport</run_depend>
  <run_depend>cv_bridge</run_depend>
  <run_depend>tf</run_depend>
  <run_depend>nav_msgs</run_depend>
  <run_depend>geometry_msgs</run_depend>


  <export>
  </export>
</package>
