主要思路是,读入视频,隔帧采用SURF计算匹配的特征点,进而计算两图的投影映射矩阵,做差分二值化,连通域检测,绘制目标。
如果背景是静态的采用camshift即可。
本文方法速度debug下大概2-3帧,release下8-9帧(SURF部分,不包含连通域以及绘制),后续可增加选定目标,动态模板小邻域中跟踪目标。实现对动态背景下的运动目标检测,模板跟踪速度可达150帧。
环境:opencv2.4.9 + vs2012
#include <iostream>#include <opencv2/opencv.hpp>#include <opencv2/nonfree/nonfree.hpp> using namespace cv;using namespace std;void main(){ //VideoCapture capture(0); VideoCapture capture("3.mov"); Mat image01,image02,imgdiff; while (true) { //隔两帧配准 capture >> image01; if (image01.empty()) { break; } capture >> image02; capture >> image02; if (image02.empty()) { break; } //GaussianBlur(image02, image02, Size(3,3), 0); double time0 = static_cast<double>(getTickCount());//开始计时 //灰度图转换 Mat image1,image2; cvtColor(image01,image1,CV_RGB2GRAY); cvtColor(image02,image2,CV_RGB2GRAY); //提取特征点 SurfFeatureDetector surfDetector(2500); // 海塞矩阵阈值,高一点速度会快些 vector<KeyPoint> keyPoint1,keyPoint2; surfDetector.detect(image1,keyPoint1); surfDetector.detect(image2,keyPoint2); //特征点描述,为下边的特征点匹配做准备 SurfDescriptorExtractor SurfDescriptor; Mat imageDesc1,imageDesc2; SurfDescriptor.compute(image1,keyPoint1,imageDesc1); SurfDescriptor.compute(image2,keyPoint2,imageDesc2); //获得匹配特征点,并提取最优配对 FlannBasedMatcher matcher; vector<DMatch> matchePoints; matcher.match(imageDesc1,imageDesc2,matchePoints,Mat()); sort(matchePoints.begin(),matchePoints.end()); //特征点排序 //获取排在前N个的最优匹配特征点 vector<Point2f> imagePoints1,imagePoints2; for(int i=0; i<(int)(matchePoints.size()*0.25); i++) { imagePoints1.push_back(keyPoint1[matchePoints[i].queryIdx].pt); imagePoints2.push_back(keyPoint2[matchePoints[i].trainIdx].pt); } //获取图像1到图像2的投影映射矩阵 尺寸为3*3 Mat homo=findHomography(imagePoints1,imagePoints2,CV_RANSAC); //cout<<"变换矩阵为:/n"<<homo<<endl<<endl; //输出映射矩阵 //图像配准 Mat imageTransform1,imgpeizhun,imgerzhi; warpPerspective(image01,imageTransform1,homo,Size(image02.cols,image02.rows)); //imshow("经过透视矩阵变换后",imageTransform1); absdiff(image02, imageTransform1, imgpeizhun); //imshow("配准diff", imgpeizhun); threshold(imgpeizhun, imgerzhi, 50, 255.0 , CV_THRESH_BINARY); //imshow("配准二值化", imgerzhi); //输出所需时间 time0 = ((double)getTickCount()-time0)/getTickFrequency(); cout<<1/time0<<endl; Mat temp,image02temp; float m_BiLi = 0.9; 
image02temp = image02.clone(); cvtColor(imgerzhi,temp,CV_RGB2GRAY); //检索连通域 Mat se=getStructuringElement(MORPH_RECT, Size(5,5)); morphologyEx(temp, temp, MORPH_DILATE, se); vector<vector<Point>> contours; findContours(temp, contours, RETR_EXTERNAL, CHAIN_APPROX_NONE); if (contours.size()<1) { continue; } for (int k = 0; k < contours.size(); k++) { Rect bomen = boundingRect(contours[k]); //省略由于配准带来的边缘无效信息 if (bomen.x > image02temp.cols * (1 - m_BiLi) && bomen.y > image02temp.rows * (1 - m_BiLi) && bomen.x + bomen.width < image02temp.cols * m_BiLi && bomen.y + bomen.height < image02temp.rows * m_BiLi) { rectangle(image02temp, bomen, Scalar(255,0,255), 2, 8, 0); } } /* for (int i = 50; i < image02.rows - 100; i++) { for (int j = 50; j < image02.cols - 100; j++) { uchar pixel = temp.at<uchar>(i,j); if (pixel == 255) { Rect bomen(j-7, i-7, 14, 14); rectangle(image02, bomen, Scalar(255,255,255),1,8,0); } } } */ imshow("检测与跟踪",image02temp); waitKey(20); } }检测远处运动的车辆
新闻热点
疑难解答