Function overview:
- void LUT(
- InputArray src, //source image
- InputArray lut, //look-up table of 256 elements; for a multi-channel source it may have either a single channel (the same table is then applied to every channel) or the same number of channels as the source
- OutputArray dst //destination image
- )
How it works (single-channel case):
For an 8-bit single-channel image the grey levels run from 0 to 255. If we want to replace certain grey levels with other values, a look-up table is exactly the right tool.
For example, suppose we want pixels with grey levels 0-100 to become 0, 101-200 to become 100, and 201-255 to become 255. We can build a table that encodes this mapping (a sketch is given below);
when the table is applied to the image, pixels with grey levels 0-100 become 0, those with 101-200 become 100, and those with 201-255 become 255. That is essentially what a mapping (look-up) table does.
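A minimal sketch of how that three-level table could be built with OpenCV (the name img is only illustrative; an 8-bit single-channel Mat is assumed):
- //map 0-100 -> 0, 101-200 -> 100, 201-255 -> 255
- Mat levelTable(1, 256, CV_8U);
- uchar *q = levelTable.data;
- for (int i = 0; i < 256; i++) {
- if (i <= 100) q[i] = 0;
- else if (i <= 200) q[i] = 100;
- else q[i] = 255;
- }
- LUT(img, levelTable, img); //img is assumed to be an 8-bit single-channel Mat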
Typical usage (illustrated here with image inversion):
(Although a hand-written loop over the pixels gives the same result, prefer the built-in OpenCV functions. Calling LUT is the fastest option, because the OpenCV library can run it multi-threaded via Intel Threading Building Blocks.)
- //Build the look-up table
- Mat lookUpTable(1, 256, CV_8U);
- uchar *p = lookUpTable.data;
- for(int i=0; i<256; i++)
- p[i]=255-i;
- //Invert the image with the LUT function
- LUT(img1,lookUpTable,img1);
In OpenCV code you frequently run into saturate_cast. Why is it needed? Because addition, subtraction, multiplication and division can all push a value outside the valid grey-level range (0~255), so after the operation a negative result is clipped to 0 and a result above 255 is clipped to 255. saturate_cast also turns up in gradient-sharpening functions. An example follows:
Code from: https://blog.csdn.net/mjlsuccess/article/details/12401839
- //C-style version of the image-blending example
- for (int i=0; i<src1.rows; i++)
- {
- const uchar* src1_ptr = src1.ptr<uchar>(i);
- const uchar* src2_ptr = src2.ptr<uchar>(i);
- uchar* dst_ptr = dst.ptr<uchar>(i);
- for (int j=0; j<src1.cols*nChannels; j++)
- {
- //with overflow (saturation) protection
- dst_ptr[j] = saturate_cast<uchar>(src1_ptr[j]*alpha + src2_ptr[j]*beta + gama);//gama = -100, alpha = beta = 0.5
- //without overflow protection
- // dst_ptr[j] = (src1_ptr[j]*alpha + src2_ptr[j]*beta + gama);
- }
- }
- imshow("output",dst);
Roughly, the underlying logic is as follows (a small concrete illustration is given after the pseudocode):
- if(data<0)
- data=0;
- else if(data>255)
- data=255;
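For instance, the clipping can be seen directly in a tiny example like this (values chosen only for demonstration; `using namespace cv;` is assumed):
- int a = 300, b = -20;
- uchar ua = saturate_cast<uchar>(a); //ua == 255, clipped to the upper bound
- uchar ub = saturate_cast<uchar>(b); //ub == 0, clipped to the lower bound
- uchar uc = saturate_cast<uchar>(128.7); //floating-point input is rounded, here to 129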
The C++ stack is a container adapter that gives the programmer the full functionality of a stack, that is, a first-in last-out (FILO) data structure.
The header for the C++ STL stack is:
#include <stack>
Member functions of the C++ STL stack (a short usage sketch follows the list):
- //operators: compare and assign stacks
-
- empty() //returns true if the stack is empty
-
- pop() //removes the top element
-
- push() //pushes an element onto the top of the stack
-
- size() //returns the number of elements in the stack
-
- top() //returns the top element
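Below is a minimal, self-contained usage sketch of these member functions (all values are illustrative only):
- #include <iostream>
- #include <stack>
- using namespace std;
-
- int main()
- {
- stack<int> s;
- s.push(1); //stack: 1
- s.push(2); //stack: 1 2, with 2 on top
- cout << s.top() << endl; //prints 2
- cout << s.size() << endl; //prints 2
- s.pop(); //removes the top element (2)
- cout << s.empty() << endl; //prints 0, the stack is not empty
- return 0;
- }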
Reference: https://www.cnblogs.com/rednodel/p/5148156.html
- #include<iostream>
- using namespace std;
-
- class A
- {
- public:
- A( int i ){}
- };
-
- class B {
- public:
- B():a(1){}
- //or: B( int i ):a( i ){ }. The argument for member a must be supplied in the initializer list after the colon; it cannot be supplied inside the constructor body!
- private:
- A a;
- };
-
- int main()
- {
- B b;
- }
When reading matrix elements, or when fetching a pointer to a matrix row, the data type has to be specified. Writing "<uchar>" over and over is tedious, and tedium breeds mistakes, such as calling at() with the wrong element type (for example double on an 8-bit matrix). This is not a syntax error, so the compiler will not complain. At run time at() then does not return the element at the expected (i,j) position; the access is out of bounds, yet the program may not even report an error. Bugs like this make a program look fine one moment and die with a segmentation fault the next, and in a large code base they are hard to track down.
With the Mat_ class the element type is fixed when the variable is declared and no longer has to be spelled out on every access, which makes the code both cleaner and less error-prone. The same task is rewritten with Mat_ in the second double for loop of the example below.
- #include <iostream>
- #include "opencv2/opencv.hpp"
- #include<stdio.h>
-
- using namespace std;
- using namespace cv;
-
- int main(int argc,char* argv[])
- {
- Mat M(600,800,CV_8UC1);
- for(int i=0;i<M.rows;++i){
- //the type must be specified when fetching a row pointer
- uchar *p=M.ptr<uchar>(i);
- for(int j=0;j<M.cols;++j){
- double d1=(double)((i+j)%255);
- //the type must be specified when accessing a pixel with at()
- M.at<uchar>(i,j)=d1;
- double d2=M.at<uchar>(i,j);
- }
- }
-
-
- //the element type is fixed when the variable is declared
- Mat_<uchar> M1=(Mat_<uchar>&)M;
- for(int i=0;i<M1.rows;++i)
- {
- //no element type needed; the code is cleaner
- uchar *p=M1.ptr(i);
-
-
- for(int j=0;j<M1.cols;++j){
- double d1=(double)((i+j)%255);
- //read and write elements directly with MATLAB-style indexing
- M1(i,j)=d1;
- double d2=M1(i,j);
- }
- }
- return 0;
- }
The code here comes from (with thanks): https://blog.csdn.net/FunnyWhiteCat/article/details/81387561
First, the original image:
Processing pipeline:
Source code:
- #include <opencv2/opencv.hpp>
- #include <iostream>
- #include <stack>
-
- using namespace cv;
- using namespace std;
-
- //a class whose only constructor takes parameters; when such a class is used as a member of another class it must be initialized in the enclosing class's initializer list (see the A/B example above)
- class CrackInfo
- {
- public:
- CrackInfo(Point& position, long length, float width) : Position(position), Length(length), Width(width) {}
- Point Position; //where the measurement text would be drawn
- long Length; //skeleton length in pixels
- float Width; //average width, i.e. area / length
- };
-
- /* Increase contrast */
- void addContrast(Mat & srcImg);
- /* Swap two Mats */
- void swapMat(Mat & srcImg, Mat & dstImg);
- /* Binarize the image: 0 -> 0, non-zero -> 255 */
- void binaryzation(Mat & srcImg);
- /* Find connected domains and delete those that do not meet the criteria */
- void findConnectedDomain(Mat & srcImg, vector<vector<Point>>& connectedDomains, int area, int WHRatio);
- /* Thin the connected domains down to their skeletons */
- void thinImage(Mat & srcImg);
- /* Collect the coordinates of the white pixels in the image */
- void getWhitePoints(Mat &srcImg, vector<Point>& domain);
- /* Compute where to place the length/width annotation */
- Point calInfoPosition(int imgRows, int imgCols, int padding, const std::vector<cv::Point>& domain);
-
- int main(int argc, char** argv) {
- Mat srcImg = imread("./image/20180803215201452.jpg");
- Mat dstImg, dstImg2;
- //convert to grayscale
- cvtColor(srcImg, dstImg, CV_BGR2GRAY, 1);
- //increase the contrast
- addContrast(dstImg);
- //swap the two images
- swapMat(srcImg, dstImg);
- //edge detection
- Canny(srcImg, dstImg, 50, 150);
- //morphological operations
- Mat kernel = getStructuringElement(MORPH_ELLIPSE, Size(3, 3));
- dilate(dstImg, dstImg, kernel);//dilation
- morphologyEx(dstImg, dstImg, CV_MOP_CLOSE, kernel, Point(-1, -1), 3);
- morphologyEx(dstImg, dstImg, CV_MOP_CLOSE, kernel);
- //find connected domains
- vector<vector<Point>> connectedDomains;
- findConnectedDomain(dstImg, connectedDomains, 20, 3);
- kernel = getStructuringElement(MORPH_ELLIPSE, Size(7, 7));
- morphologyEx(dstImg, dstImg, CV_MOP_CLOSE, kernel, Point(-1, -1), 5);
-
- connectedDomains.clear();
- findConnectedDomain(dstImg, connectedDomains, 20, 3);
- kernel = getStructuringElement(MORPH_CROSS, Size(3, 3));
- morphologyEx(dstImg, dstImg, CV_MOP_OPEN, kernel);
-
- kernel = getStructuringElement(MORPH_ELLIPSE, Size(3, 3));
- erode(dstImg, dstImg, kernel);
-
- connectedDomains.clear();
- findConnectedDomain(dstImg, connectedDomains, 20, 3);
-
- cout << "Starting measurement" << endl;
- cout << "Number of connected domains: " << connectedDomains.size() << endl;
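- //an all-zero look-up table: applying it with LUT() sets every pixel to 0, i.e. blanks the image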
- Mat lookUpTable(1, 256, CV_8U, Scalar(0));
- vector<CrackInfo> crackInfos;
- for (auto domain_it = connectedDomains.begin(); domain_it != connectedDomains.end(); ++domain_it) {
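- //blank the image with the all-zero table, then redraw only the current domain so it can be measured in isolation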
- LUT(dstImg, lookUpTable, dstImg);
- for (auto point_it = domain_it->cbegin(); point_it != domain_it->cend(); ++point_it) {
- dstImg.ptr<uchar>(point_it->y)[point_it->x] = 255;
- }
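- //the domain's pixel count before thinning approximates the crack area (in pixels)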
- double area = (double)domain_it->size();
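- //thin the domain to its skeleton; the white pixels that remain approximate the crack centreline, so their count is used as the length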
- thinImage(dstImg);
- getWhitePoints(dstImg, *domain_it);
- long length = (long)domain_it->size();
- Point position = calInfoPosition(dstImg.rows, dstImg.cols, 50, *domain_it);
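- //the average width is estimated as area / length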
- crackInfos.push_back(CrackInfo(position, length, (float)(area / length)));
- }
-
- cout << "Starting to draw the annotations" << endl;
- cout << "Number of annotations: " << crackInfos.size() << endl;
-
- LUT(dstImg, lookUpTable, dstImg);
- for (auto domain_it = connectedDomains.cbegin(); domain_it != connectedDomains.cend(); ++domain_it) {
- for (auto point_it = domain_it->cbegin(); point_it != domain_it->cend(); ++point_it) {
- dstImg.ptr<uchar>(point_it->y)[point_it->x] = 255;
- }
- }
-
- //ostringstream info;
- //for (auto it = crackInfos.cbegin(); it != crackInfos.cend(); ++it) {
- // info.str("");
- // info << *it;
- // putText(dstImg, info.str(), it->Position, FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
- //}
-
- imwrite("result1.png", dstImg);
- cout << "Result image saved" << endl;
- return 0;
- }
-
- /* Increase image contrast with a look-up table (LUT) */
- void addContrast(Mat & srcImg) {
- Mat lookUpTable(1, 256, CV_8U);
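- //each grey level is scaled by 1.1^5 (about 1.61) and saturated at 255 to stretch the contrast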
- double temp = pow(1.1, 5);
- uchar* p = lookUpTable.data;
- for (int i = 0; i < 256; ++i)
- p[i] = saturate_cast<uchar>(i * temp);
- LUT(srcImg, lookUpTable, srcImg);
- }
- /* Swap two images */
- void swapMat(Mat & srcImg, Mat & dstImg) {
- Mat tempImg = srcImg;
- srcImg = dstImg;
- dstImg = tempImg;
- }
- /* Find connected domains (8-connected flood fill using an explicit stack) and delete those that do not meet the criteria */
- void findConnectedDomain(Mat & srcImg, vector<vector<Point>>& connectedDomains, int area, int WHRatio) {
- Mat_<uchar> tempImg = (Mat_<uchar> &)srcImg;
-
- for (int i = 0; i < tempImg.rows; ++i) {
- uchar* row = tempImg.ptr(i); //pointer to row i of the image data
- for (int j = 0; j < tempImg.cols; ++j) {
- if (row[j] == 255) {
- stack<Point> connectedPoints;
- vector<Point> domain;
- connectedPoints.push(Point(j, i));
- while (!connectedPoints.empty()) {
- Point currentPoint = connectedPoints.top();
- domain.push_back(currentPoint);
-
- int colNum = currentPoint.x;
- int rowNum = currentPoint.y;
-
- tempImg.ptr(rowNum)[colNum] = 0;
- connectedPoints.pop();
-
- if (rowNum - 1 >= 0 && colNum - 1 >= 0 && tempImg.ptr(rowNum - 1)[colNum - 1] == 255) {
- tempImg.ptr(rowNum - 1)[colNum - 1] = 0;
- connectedPoints.push(Point(colNum - 1, rowNum - 1));
- }
- if (rowNum - 1 >= 0 && tempImg.ptr(rowNum - 1)[colNum] == 255) {
- tempImg.ptr(rowNum - 1)[colNum] = 0;
- connectedPoints.push(Point(colNum, rowNum - 1));
- }
- if (rowNum - 1 >= 0 && colNum + 1 < tempImg.cols && tempImg.ptr(rowNum - 1)[colNum + 1] == 255) {
- tempImg.ptr(rowNum - 1)[colNum + 1] = 0;
- connectedPoints.push(Point(colNum + 1, rowNum - 1));
- }
- if (colNum - 1 >= 0 && tempImg.ptr(rowNum)[colNum - 1] == 255) {
- tempImg.ptr(rowNum)[colNum - 1] = 0;
- connectedPoints.push(Point(colNum - 1, rowNum));
- }
- if (colNum + 1 < tempImg.cols && tempImg.ptr(rowNum)[colNum + 1] == 255) {
- tempImg.ptr(rowNum)[colNum + 1] = 0;
- connectedPoints.push(Point(colNum + 1, rowNum));
- }
- if (rowNum + 1 < tempImg.rows && colNum - 1 >= 0 && tempImg.ptr(rowNum + 1)[colNum - 1] == 255) {
- tempImg.ptr(rowNum + 1)[colNum - 1] = 0;
- connectedPoints.push(Point(colNum - 1, rowNum + 1));
- }
- if (rowNum + 1 < tempImg.rows && tempImg.ptr(rowNum + 1)[colNum] == 255) {
- tempImg.ptr(rowNum + 1)[colNum] = 0;
- connectedPoints.push(Point(colNum, rowNum + 1));
- }
- if (rowNum + 1 < tempImg.rows && colNum + 1 < tempImg.cols && tempImg.ptr(rowNum + 1)[colNum + 1] == 255) {
- tempImg.ptr(rowNum + 1)[colNum + 1] = 0;
- connectedPoints.push(Point(colNum + 1, rowNum + 1));
- }
- }
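- //keep only domains that are large enough and sufficiently elongated, which is characteristic of cracks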
- if (domain.size() > area) {
- RotatedRect rect = minAreaRect(domain);
- float width = rect.size.width;
- float height = rect.size.height;
- if (width < height) {
- float temp = width;
- width = height;
- height = temp;
- }
- if (width > height * WHRatio && width > 50) {
- for (auto cit = domain.begin(); cit != domain.end(); ++cit) {
- tempImg.ptr(cit->y)[cit->x] = 250;
- }
- connectedDomains.push_back(domain);
- }
- }
- }
- }
- }
-
- binaryzation(srcImg);
- }
- /* Binarize the image: 0 -> 0, non-zero -> 255 */
- void binaryzation(Mat & srcImg) {
- Mat lookUpTable(1, 256, CV_8U, Scalar(255));
- lookUpTable.data[0] = 0;
- LUT(srcImg, lookUpTable, srcImg);
- }
- /* Thin the connected domains down to their skeletons (iterative thinning) */
- void thinImage(Mat & srcImg) {
- vector<Point> deleteList;
- int neighbourhood[9];
- int nl = srcImg.rows;
- int nc = srcImg.cols;
- bool inOddIterations = true;
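- //alternate two sub-iterations with complementary neighbour conditions (Zhang-Suen style) until a pass deletes no pixel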
- while (true) {
- for (int j = 1; j < (nl - 1); j++) {
- uchar* data_last = srcImg.ptr<uchar>(j - 1);
- uchar* data = srcImg.ptr<uchar>(j);
- uchar* data_next = srcImg.ptr<uchar>(j + 1);
- for (int i = 1; i < (nc - 1); i++) {
- if (data[i] == 255) {
- int whitePointCount = 0;
- neighbourhood[0] = 1;
- if (data_last[i] == 255) neighbourhood[1] = 1;
- else neighbourhood[1] = 0;
- if (data_last[i + 1] == 255) neighbourhood[2] = 1;
- else neighbourhood[2] = 0;
- if (data[i + 1] == 255) neighbourhood[3] = 1;
- else neighbourhood[3] = 0;
- if (data_next[i + 1] == 255) neighbourhood[4] = 1;
- else neighbourhood[4] = 0;
- if (data_next[i] == 255) neighbourhood[5] = 1;
- else neighbourhood[5] = 0;
- if (data_next[i - 1] == 255) neighbourhood[6] = 1;
- else neighbourhood[6] = 0;
- if (data[i - 1] == 255) neighbourhood[7] = 1;
- else neighbourhood[7] = 0;
- if (data_last[i - 1] == 255) neighbourhood[8] = 1;
- else neighbourhood[8] = 0;
- for (int k = 1; k < 9; k++) {
- whitePointCount = whitePointCount + neighbourhood[k];
- }
- if ((whitePointCount >= 2) && (whitePointCount <= 6)) {
- int ap = 0;
- if ((neighbourhood[1] == 0) && (neighbourhood[2] == 1)) ap++;
- if ((neighbourhood[2] == 0) && (neighbourhood[3] == 1)) ap++;
- if ((neighbourhood[3] == 0) && (neighbourhood[4] == 1)) ap++;
- if ((neighbourhood[4] == 0) && (neighbourhood[5] == 1)) ap++;
- if ((neighbourhood[5] == 0) && (neighbourhood[6] == 1)) ap++;
- if ((neighbourhood[6] == 0) && (neighbourhood[7] == 1)) ap++;
- if ((neighbourhood[7] == 0) && (neighbourhood[8] == 1)) ap++;
- if ((neighbourhood[8] == 0) && (neighbourhood[1] == 1)) ap++;
- if (ap == 1) {
- if (inOddIterations && (neighbourhood[3] * neighbourhood[5] * neighbourhood[7] == 0)
- && (neighbourhood[1] * neighbourhood[3] * neighbourhood[5] == 0)) {
- deleteList.push_back(Point(i, j));
- }
- else if (!inOddIterations && (neighbourhood[1] * neighbourhood[5] * neighbourhood[7] == 0)
- && (neighbourhood[1] * neighbourhood[3] * neighbourhood[7] == 0)) {
- deleteList.push_back(Point(i, j));
- }
- }
- }
- }
- }
- }
- if (deleteList.size() == 0)
- break;
- for (size_t i = 0; i < deleteList.size(); i++) {
- Point tem;
- tem = deleteList[i];
- uchar* data = srcImg.ptr<uchar>(tem.y);
- data[tem.x] = 0;
- }
- deleteList.clear();
-
- inOddIterations = !inOddIterations;
- }
- }
- /* Collect the coordinates of the white pixels in the image */
- void getWhitePoints(Mat &srcImg, vector<Point>& domain) {
- domain.clear();
- Mat_<uchar> tempImg = (Mat_<uchar> &)srcImg;
- for (int i = 0; i < tempImg.rows; i++) {
- uchar * row = tempImg.ptr<uchar>(i);
- for (int j = 0; j < tempImg.cols; ++j) {
- if (row[j] != 0)
- domain.push_back(Point(j, i));
- }
- }
- }
- /* Compute where to place the length/width annotation */
- Point calInfoPosition(int imgRows, int imgCols, int padding, const std::vector<cv::Point>& domain) {
- long xSum = 0;
- long ySum = 0;
- for (auto it = domain.cbegin(); it != domain.cend(); ++it) {
- xSum += it->x;
- ySum += it->y;
- }
- int x = 0;
- int y = 0;
- x = (int)(xSum / domain.size());
- y = (int)(ySum / domain.size());
- if (x < padding)
- x = padding;
- if (x > imgCols - padding)
- x = imgCols - padding;
- if (y < padding)
- y = padding;
- if (y > imgRows - padding)
- y = imgRows - padding;
-
- return cv::Point(x, y);
- }
Processing result:
Still to be improved......