标签:opencv meanshift 图像聚类 物体检测 反向投影
for(s=top;s<=down;s++){
for(t=left;t<=right;t++)
{
ws=(s-m)*(s-m)+(t-n)*(t-n);//spatial information
ws/=(hs*hs);
ws=exp(-ws);
wr=(data[s*step+t*channels]-data[m*step+n*channels])*(data[s*step+t*channels]-data[m*step+n*channels]);
wr+=(data[s*step+t*channels+1]-data[m*step+n*channels+1])*(data[s*step+t*channels+1]-data[m*step+n*channels+1]);
wr+=(data[s*step+t*channels+2]-data[m*step+n*channels+2])*(data[s*step+t*channels+2]-data[m*step+n*channels+2]);
wr/=(hr*hr);
if(wr>1)
wr=0.;
else
wr=exp(-wr);
sumw+=wr*ws;
for(k=0;k<5;k++)//try
y[1][k]+=oridata[s*width+t][k]*wr*ws;
}
}
for(k=0;k<5;k++) //try
y[1][k]/=sumw;
//下一个要到的点
m=(int)(y[1][0]+0.5);
n=(int)(y[1][1]+0.5);

假设我们有了反向投影图,再用meanshift,不就能找到图像中最接近当前区域的位置了么~~

//#include <stdafx.h>
#include <cxcore.h>
#include <cv.h>
#include <highgui.h>
#include <stdio.h>
#include <vector>
#include <utility>
using namespace cv;
using namespace std;
#define hs 25
#define hr 25
#define maxstep 100
#define EPSLON 0.01
#define width 1920
#define height 1080
float oridata[height*width][5];//try
int visited[height][width];
int main(int argc,char **argv)
{
IplImage *oriImg,*luvImg,*fltImg,*afterImg;
char *filename = argv[1];
oriImg=cvLoadImage(filename,1);
if(!oriImg){
printf("cannot load the file.\n");
return -1;
}
luvImg=cvCreateImage(cvSize(width,height),oriImg->depth,oriImg->nChannels);
fltImg=cvCreateImage(cvSize(width,height),oriImg->depth,oriImg->nChannels);
afterImg=cvCreateImage(cvSize(width,height),oriImg->depth,oriImg->nChannels);
cvCvtColor(oriImg,luvImg,CV_RGB2Luv);
uchar *data,*newdata;
int channels, step,depth;
depth=luvImg->depth;
step=luvImg->widthStep;
channels=luvImg->nChannels;
data=(uchar *) luvImg->imageData;
newdata=(uchar *) fltImg->imageData;
int i,j,s,t,k,m,n,index;
int top,down,left,right;
float ws,wr;
float mhlength;
float sumw;
float y[2][5];//try, otherwise the second is 5
for(i=0;i<height;i++)
{
for(j=0;j<width;j++)
{
oridata[i*width+j][0]=i;
oridata[i*width+j][1]=j;
visited[i][j]=0;
oridata[i*width+j][2]=data[i*step+j*channels];
oridata[i*width+j][3]=data[i*step+j*channels+1];
oridata[i*width+j][4]=data[i*step+j*channels+2];
}
}
for(i=0;i<height;i++)
{
for(j=0;j<width;j++)
{
if(visited[i][j])
continue;
m=i;n=j;//当前的中心点
for(k=0;k<5;k++)//try
y[0][k]=oridata[i*width+j][k];//向量的初始值
vector<pair<int,int> >vss;
for(index=0;index<maxstep;index++)//对当前的结点而言,最多迭代100次
{
pair<int,int>newone;
newone = make_pair(m,n);
vss.push_back(newone);
for(k=0;k<5;k++)//try
y[1][k]=0;
mhlength=0.;
sumw=0.;
top=m-hs;
down=m+hs;
left=n-hs;
right=n+hs;
if(top<0) top =0;
if(down>height-1) down=height-1;
if(left<0) left=0;
if(right>width-1) right=width-1;
for(s=top;s<=down;s++)
{
for(t=left;t<=right;t++)
{
ws=(s-m)*(s-m)+(t-n)*(t-n);//spatial information
ws/=(hs*hs);
ws=exp(-ws);
//ws=1-ws+(ws*ws)/2-(ws*ws*ws)/6+(ws*ws*ws*ws)/24-(ws*ws*ws*ws*ws)/120;
wr=(data[s*step+t*channels]-data[m*step+n*channels])*(data[s*step+t*channels]-data[m*step+n*channels]);
wr+=(data[s*step+t*channels+1]-data[m*step+n*channels+1])*(data[s*step+t*channels+1]-data[m*step+n*channels+1]);
wr+=(data[s*step+t*channels+2]-data[m*step+n*channels+2])*(data[s*step+t*channels+2]-data[m*step+n*channels+2]);
wr/=(hr*hr);
if(wr>1)
wr=0.;
else
wr=exp(-wr);
sumw+=wr*ws;
for(k=0;k<5;k++)//try
y[1][k]+=oridata[s*width+t][k]*wr*ws;
}
}
for(k=0;k<5;k++) //try
y[1][k]/=sumw;
//下一个要到的点
m=(int)(y[1][0]+0.5);
n=(int)(y[1][1]+0.5);
if(visited[m][n])
break;
if(m<hs||m>height-hs||n<hs||n>width-hs)
break;
else{
for(k=0;k<5;k++)//try
{
mhlength+=(y[1][k]-y[0][k])*(y[1][k]-y[0][k]);
y[0][k]=y[1][k];
}
mhlength=sqrt(mhlength);
if(mhlength<EPSLON)//找到极值点
break;
}
}//邻域处理结束
for(int ii = 0;ii < vss.size();ii++)
{
int row = vss[ii].first;
int line = vss[ii].second;
newdata[row*step+line*channels]=int(y[1][2]+0.5);//try
newdata[row*step+line*channels+1]=int(y[1][3]+0.5);
newdata[row*step+line*channels+2]=int(y[1][4]+0.5);
visited[row][line]=1;
}
}
}
cvCvtColor(fltImg,afterImg,CV_Luv2RGB);
cvNamedWindow("ori",1);
cvNamedWindow("filtered",1);
cvShowImage("ori",oriImg);
cvShowImage("filtered",afterImg);
cvWaitKey(0);
cvReleaseImage(&oriImg);
cvDestroyWindow("image");
cvReleaseImage(&afterImg);
cvDestroyWindow("filtered");
return 0;
}
The constructors:
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/video/tracking.hpp"
#include <iostream>
using namespace cv;
using namespace std;
class ColorHistogram {
private:
    int histSize[3];        // number of bins per channel
    float hranges[2];       // value range shared by all channels
    const float* ranges[3]; // per-channel range pointers for calcHist
    int channels[3];        // channel indices to histogram
public:
    /// Prepares default arguments for a 3-channel 8-bit colour histogram.
    ColorHistogram() {
        histSize[0]= histSize[1]= histSize[2]= 256;
        // OpenCV histogram ranges are half-open [lower, upper), so the
        // upper bound must be 256.0 — with 255.0 the value 255 would be
        // silently dropped from the last bin.
        hranges[0]= 0.0;
        hranges[1]= 256.0;
        ranges[0]= hranges; // all channels share the same range
        ranges[1]= hranges;
        ranges[2]= hranges;
        channels[0]= 0;     // the three channels, in order
        channels[1]= 1;
        channels[2]= 2;
    }
    /// Computes the 1-D hue histogram of a BGR image.
    /// Pixels with saturation <= minSaturation are masked out, because the
    /// hue of nearly-grey pixels is unstable and would pollute the histogram.
    Mat getHueHistogram(const cv::Mat &image,int minSaturation=0) {
        Mat hist;
        // Convert to HSV; hue lives in channel 0 with range [0,180).
        Mat hsv;
        cvtColor(image, hsv, CV_BGR2HSV);
        // Optional saturation mask (empty mask means "use all pixels").
        Mat mask;
        if (minSaturation>0) {
            // Split the three channels apart.
            vector<Mat> v;
            split(hsv,v);
            // Non-zero where saturation exceeds the threshold.
            threshold(v[1],mask,minSaturation,255,THRESH_BINARY);
        }
        hranges[0]= 0.0;   // hue range is [0,180)
        hranges[1]= 180.0;
        channels[0]= 0;    // histogram over the hue channel only
        // Only pixels where mask is non-zero (high saturation) contribute.
        calcHist(&hsv,1,channels,mask,hist,1,histSize,ranges);
        return hist;
    }
};
class ContentFinder {
private:
    float hranges[2];       // value range shared by every channel
    const float* ranges[3]; // per-channel range pointers for calcBackProject
    int channels[3];        // default channel indices
    float threshold;        // decision threshold (negative = disabled)
    Mat histogram;          // reference histogram to back-project
public:
    /// Initialises with the full 8-bit range on all three channels
    /// and thresholding disabled.
    ContentFinder() : threshold(-1.0f) {
        hranges[0]= 0.0;
        hranges[1]= 255.0;
        for (int c= 0; c<3; ++c) {
            channels[c]= c;
            ranges[c]= hranges; // every channel points at the shared range pair
        }
    }
    /// Stores the reference histogram, normalised in place.
    void setHistogram(const Mat& h) {
        histogram= h;
        normalize(histogram,histogram,1.0);
    }
    /// Back-projects the stored histogram onto `image` over `dim` of the
    /// given channels, with values taken from [minValue, maxValue);
    /// the result is scaled into [0,255].
    cv::Mat find(const cv::Mat& image, float minValue, float maxValue, int *channels, int dim) {
        hranges[0]= minValue;
        hranges[1]= maxValue;
        for (int c= 0; c<3; ++c)
            ranges[c]= hranges;
        cv::Mat result;
        calcBackProject(&image,1,channels,histogram,result,ranges,255.0);
        return result;
    }
};
int main( int, char** argv )
{
Mat image= cv::imread("baboon1.jpg");
// Baboon‘s face ROI
Mat imageROI= image(cv::Rect(110,260,35,40));
// 获得Hue直方图
int minSat=65;
ColorHistogram hc;
Mat colorhist = hc.getHueHistogram(imageROI,minSat);
ContentFinder finder;
finder.setHistogram(colorhist);
image= cv::imread("baboon3.jpg");
// 色彩空间转换
Mat hsv;
cvtColor(image, hsv, CV_BGR2HSV);
vector<Mat> v;
split(hsv,v);
//除去低饱和度
threshold(v[1],v[1],minSat,255,cv::THRESH_BINARY);
// 获得反向投影
int ch[1]={0};
Mat result= finder.find(hsv,0.0f,180.0f,ch,1);
// 除去低饱和度的点
bitwise_and(result,v[1],result);
Rect rect(110,260,35,40);
rectangle(image, rect, Scalar(0,0,255));
TermCriteria criteria(TermCriteria::MAX_ITER,10,0.01);
meanShift(result,rect,criteria);
rectangle(image, rect, cv::Scalar(0,255,0));
namedWindow("result");
imshow("result",image);
waitKey(0);
return 0;
}
看下效果。

OpenCV2马拉松第11圈——meanshift与直方图反向投影(布布扣,bubuko.com)
OpenCV2马拉松第11圈——meanshift与直方图反向投影
标签:opencv meanshift 图像聚类 物体检测 反向投影
原文地址:http://blog.csdn.net/abcd1992719g/article/details/25505315