19,468
社区成员
发帖
与我相关
我的任务
分享
#include <QCoreApplication>
#include <iostream>
#include<math.h>
#include <UEyeOpenCV.hpp>
#include "opencv2/opencv.hpp"
#include "opencv2/video/tracking.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/objdetect/objdetect.hpp"
using namespace cv;
// Globals shared between main() and the onMouse() callback below.
cv::Mat image;     // undistorted (and flipped) frame shown in the "CamShift" window
cv::Mat frame;     // raw frame from the uEye camera
cv::Mat image_roi; // crop of `image` at the user's selection (shown in "selection")
cv::Mat hsv;       // `image` converted to HSV
cv::Mat hue;       // hue channel extracted from `hsv`
cv::Mat mask;      // pixels bright/saturated enough for a reliable hue
cv::Mat hist;      // hue histogram of the selected target
cv::Mat histimg = Mat::zeros(200, 320, CV_8UC3); // histogram display buffer (unused in this program)
cv::Mat backproj;  // back-projection of `hist` onto the current frame
bool backprojMode = false; // whether to show the back-projection view (unused in this program)
bool selectObject = false; // true while the left mouse button is down and a rectangle is being dragged
int trackObject = 0; // 0: nothing selected; -1: selection just finished (initialize); 1: tracking
Point origin;    // position of the first left-button click of the current drag
Rect selection;  // rectangle selected with the mouse
// Mouse callback for the "CamShift" window: drag with the left button to
// choose the initial tracking target. Updates the globals `origin` and
// `selection`, and sets trackObject = -1 once a non-empty rectangle is
// released, which tells the main loop to (re)initialize the tracker.
void onMouse( int event, int x, int y, int, void* ){
    if( selectObject )
    {
        // While the button is held, the selection is the rectangle spanned
        // by the press point (`origin`) and the current cursor position.
        selection.x = MIN(x, origin.x);
        selection.y = MIN(y, origin.y);
        selection.width = std::abs(x - origin.x);
        selection.height = std::abs(y - origin.y);
        // Clamp the rectangle so it stays inside the image.
        selection &= Rect(0, 0, image.cols, image.rows);
    }
    if( event == CV_EVENT_LBUTTONDOWN )
    {
        // Start a new drag: anchor a degenerate rectangle at the click.
        origin = Point(x, y);
        selection = Rect(x, y, 0, 0);
        selectObject = true;
    }
    else if( event == CV_EVENT_LBUTTONUP )
    {
        selectObject = false;
        // Only a non-empty rectangle arms tracker initialization.
        if( selection.width > 0 && selection.height > 0 )
            trackObject = -1;
    }
}
int main(int argc, char *argv[])
{
QCoreApplication a(argc, argv);
/* set undistort */
cv::FileStorage fs("out_camera_data.xml", cv::FileStorage::READ);
cv::Mat cameraMatrix, distCoeffs;
fs["Camera_Matrix"] >> cameraMatrix;
fs["Distortion_Coefficients"] >> distCoeffs;
std::cout << "camera matrix: " << cameraMatrix << std::endl
<< "distortion coeffs: " << distCoeffs << std::endl;
/* open camera */
UeyeOpencvCam cam = UeyeOpencvCam(752,480);
/* set com*/
ServoController com;
com.ServoController::createSerialPort();
/* OpenCV */
cv::namedWindow( "CamShift", CV_WINDOW_AUTOSIZE);
cv::setMouseCallback( "CamShift", onMouse, 0 );//消息响应机制
for(;;){
frame= cam.getFrame();
//cv::cvtColor(frame,grayFrame, CV_BGR2GRAY);
cv::undistort(frame,image,cameraMatrix,distCoeffs);
cv::flip(image,image,-1);
if (trackObject<0){
image_roi = image(selection);
cv::namedWindow( "selection", CV_WINDOW_AUTOSIZE);
cv::imshow("selection",image_roi);
trackObject=1;
}
if(trackObject>0){
cvtColor(image, hsv, CV_BGR2HSV);//将rgb摄像头帧转化成hsv空间的
hue.create(hsv.size(), hsv.depth());//hue初始化为与hsv大小深度一样的矩阵,色调的度量是用角度表示的,红绿蓝之间相差120度,反色相差180度
inRange(hsv, Scalar(0, 30, 10), Scalar(180, 256, 256), mask);
int ch[] = {0, 0};
mixChannels(&hsv, 1, &hue, 1, ch, 1);//将hsv第一个通道(也就是色调)的数复制到hue中,0索引数
Mat roi(hue, selection), maskroi(mask, selection);
int hsize = 16;
float hranges[] = {0,180};//hranges在后面的计算直方图函数中要用到
const float* phranges = hranges;
cv::calcHist(&roi, 1, 0, maskroi, hist, 1, &hsize, &phranges);
cv::normalize(hist, hist, 0, 255, CV_MINMAX);
cv::calcBackProject(&hue, 1, 0, hist, backproj, &phranges);//计算直方图的反向投影,计算hue图像0通道直方图hist的反向投影,并让入backproj中
backproj &= mask;
Rect trackWindow= selection;
//trackWindow为鼠标选择的区域,TermCriteria为确定迭代终止的准则
RotatedRect trackBox = CamShift(backproj, trackWindow, TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ));
// RotatedRect
Point2f vertices[4];
trackBox.points(vertices);
for (int i = 0; i < 4; i++)
cv::line(image, vertices[i], vertices[(i+1)%4], Scalar(0,255,0));
}
cv::imshow("CamShift",image);
if( cv::waitKey(30)>=0 ) break;
}
return a.exec();
}
SOURCES += \
main.cpp
INCLUDEPATH += /usr/local/include \
/usr/local/include/opencv \
/usr/local/include/opencv2
LIBS += /usr/local/lib/libopencv_core.so \
/usr/local/lib/libopencv_highgui.so \
/usr/local/lib/libopencv_imgproc.so
#include "opencv2/video/tracking.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <iostream>
#include <ctype.h>
using namespace cv;
using namespace std;
// Print the demo's usage description and interactive hot-key reference
// to standard output. Output is identical to the original implementation.
void help()
{
    std::cout
        << "\nThis is a demo that shows mean-shift based tracking\n"
           "You select a color objects such as your face and it tracks it.\n"
           "This reads from video camera (0 by default, or the camera number the user enters\n"
           "Call:\n"
           "\n./camshiftdemo [camera number]"
           "\n"
        << std::endl;
    std::cout
        << "\n\nHot keys: \n"
           "\tESC - quit the program\n"
           "\tc - stop the tracking\n"
           "\tb - switch to/from backprojection view\n"
           "\th - show/hide object histogram\n"
           "\tp - pause video\n"
           "To initialize tracking, select the object with mouse\n"
        << std::endl;
}
// Globals shared between main() and the onMouse() callback below.
Mat image;                 // current frame shown in "CamShift Demo"
bool backprojMode = false; // true: display the back-projection instead of the camera image ('b' key)
bool selectObject = false; // true while the user is dragging a selection rectangle
int trackObject = 0;       // 0: idle; -1: selection just finished (initialize); 1: tracking
bool showHist = true;      // toggled with 'h': show/hide the histogram window
Point origin;              // position of the first click of the current drag
Rect selection;            // rectangle selected with the mouse
int vmin = 10, vmax = 256, smin = 30; // HSV value/saturation thresholds, adjustable via trackbars
// Mouse callback for "CamShift Demo": drag with the left button to select
// the object to track. Writes the dragged rectangle into the global
// `selection` and sets trackObject = -1 once a non-empty rectangle is
// released, signalling the main loop to (re)initialize the tracker.
void onMouse( int event, int x, int y, int, void* )
{
if( selectObject )
{
// While dragging: rectangle spanned by the press point (`origin`) and
// the current cursor position, clamped to the image bounds.
selection.x = MIN(x, origin.x);
selection.y = MIN(y, origin.y);
selection.width = std::abs(x - origin.x);
selection.height = std::abs(y - origin.y);
selection &= Rect(0, 0, image.cols, image.rows);
}
switch( event )
{
case CV_EVENT_LBUTTONDOWN:
// Start a new drag: degenerate rectangle anchored at the click.
origin = Point(x,y);
selection = Rect(x,y,0,0);
selectObject = true;
break;
case CV_EVENT_LBUTTONUP:
selectObject = false;
// Only a non-empty rectangle arms tracker initialization.
if( selection.width > 0 && selection.height > 0 )
trackObject = -1;
break;
}
}
// Entry point of the CamShift demo: opens a camera (index or filename from
// argv), lets the user select a target with the mouse, and tracks it by
// back-projecting the target's hue histogram and running CamShift each frame.
int main( int argc, char** argv )
{
VideoCapture cap;
Rect trackWindow;
RotatedRect trackBox; // NOTE(review): shadowed by the local trackBox declared in the loop below; this one is never used
int hsize = 16;              // number of hue bins
float hranges[] = {0,180};   // hue range of OpenCV's 8-bit HSV
const float* phranges = hranges;
// A single one-digit argument is treated as a camera index; any other
// single argument is treated as a video file path.
if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
cap.open(argc == 2 ? argv[1][0] - '0' : 0);
else if( argc == 2 )
cap.open(argv[1]);
if( !cap.isOpened() )
{
help();
cout << "***Could not initialize capturing...***\n";
return 0;
}
help();
namedWindow( "Histogram", 0 );
namedWindow( "CamShift Demo", 0 );
setMouseCallback( "CamShift Demo", onMouse, 0 ); // target selection by dragging
// Trackbars adjust the HSV thresholds used to build the hue mask.
createTrackbar( "Vmin", "CamShift Demo", &vmin, 256, 0 );
createTrackbar( "Vmax", "CamShift Demo", &vmax, 256, 0 );
createTrackbar( "Smin", "CamShift Demo", &smin, 256, 0 );
Mat frame, hsv, hue, mask, hist, histimg = Mat::zeros(200, 320, CV_8UC3), backproj;
bool paused = false;
for(;;)
{
// While paused, keep redrawing the last frame instead of grabbing a new one.
if( !paused )
{
cap >> frame;
if( frame.empty() )
break;
}
frame.copyTo(image);
if( !paused )
{
cvtColor(image, hsv, CV_BGR2HSV);
if( trackObject )
{
// Keep only pixels bright/saturated enough for a reliable hue,
// using the current trackbar thresholds.
int _vmin = vmin, _vmax = vmax;
inRange(hsv, Scalar(0, smin, MIN(_vmin,_vmax)),
Scalar(180, 256, MAX(_vmin, _vmax)), mask);
int ch[] = {0, 0};
hue.create(hsv.size(), hsv.depth());
mixChannels(&hsv, 1, &hue, 1, ch, 1); // copy the hue channel only
if( trackObject < 0 )
{
// A selection was just completed: build the target's hue
// histogram once, seed the search window, and render the
// histogram into `histimg` for display.
Mat roi(hue, selection), maskroi(mask, selection);
calcHist(&roi, 1, 0, maskroi, hist, 1, &hsize, &phranges);
normalize(hist, hist, 0, 255, CV_MINMAX);
trackWindow = selection;
trackObject = 1; // histogram is ready; switch to tracking mode
histimg = Scalar::all(0);
int binW = histimg.cols / hsize;
Mat buf(1, hsize, CV_8UC3);
// One representative HSV color per bin, converted to BGR for drawing.
for( int i = 0; i < hsize; i++ )
buf.at<Vec3b>(i) = Vec3b(saturate_cast<uchar>(i*180./hsize), 255, 255);
cvtColor(buf, buf, CV_HSV2BGR);
for( int i = 0; i < hsize; i++ )
{
int val = saturate_cast<int>(hist.at<float>(i)*histimg.rows/255);
rectangle( histimg, Point(i*binW,histimg.rows),
Point((i+1)*binW,histimg.rows - val),
Scalar(buf.at<Vec3b>(i)), -1, 8 );
}
}
// Back-project the stored histogram and let CamShift update
// the window's position, size, and orientation.
calcBackProject(&hue, 1, 0, hist, backproj, &phranges);
backproj &= mask;
RotatedRect trackBox = CamShift(backproj, trackWindow,
TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ));
if( trackWindow.area() <= 1 )
{
// Tracker collapsed: re-seed a window around the last position.
// NOTE(review): the 3rd/4th Rect arguments are width/height but
// look like coordinates (trackWindow.x + r, trackWindow.y + r);
// this matches the original OpenCV sample — verify intent.
int cols = backproj.cols, rows = backproj.rows, r = (MIN(cols, rows) + 5)/6;
trackWindow = Rect(trackWindow.x - r, trackWindow.y - r,
trackWindow.x + r, trackWindow.y + r) &
Rect(0, 0, cols, rows);
}
if( backprojMode )
cvtColor( backproj, image, CV_GRAY2BGR ); // show back-projection instead of the camera image
ellipse( image, trackBox, Scalar(0,0,255), 3, CV_AA ); // red tracking ellipse
}
}
else if( trackObject < 0 )
paused = false; // a new selection while paused resumes playback
// Invert the pixels under an in-progress selection as visual feedback.
if( selectObject && selection.width > 0 && selection.height > 0 )
{
Mat roi(image, selection);
bitwise_not(roi, roi);
}
imshow( "CamShift Demo", image );
imshow( "Histogram", histimg );
char c = (char)waitKey(10);
if( c == 27 ) // ESC quits
break;
switch(c)
{
case 'b': // toggle back-projection view
backprojMode = !backprojMode;
break;
case 'c': // stop tracking and clear the histogram display
trackObject = 0;
histimg = Scalar::all(0);
break;
case 'h': // show/hide the histogram window
showHist = !showHist;
if( !showHist )
destroyWindow( "Histogram" );
else
namedWindow( "Histogram", 1 );
break;
case 'p': // pause/resume video
paused = !paused;
break;
default:
;
}
}
return 0;
}